PROJECT - (15th Jan, 2021 - 31st Jan, 2021)

In [1385]:
import pandas as pd         # To read the data set
import numpy as np          # Importing numpy library
import seaborn as sns       # For data visualization
import matplotlib.pyplot as plt      # Necessary library for plotting graphs
%matplotlib inline
sns.set(color_codes = True)

from sklearn import metrics          # Importing metrics
from sklearn.model_selection import train_test_split       # Splitting data into train and test set
from sklearn.metrics import classification_report, accuracy_score, recall_score, f1_score, roc_auc_score, average_precision_score, confusion_matrix
from sklearn.preprocessing import StandardScaler           # Importing to standardize the data
from sklearn.impute import SimpleImputer                   # Importing to fill in zero values in the data
from sklearn.preprocessing import LabelEncoder  
from sklearn.preprocessing import PolynomialFeatures       # Importing polynomial features library
from sklearn.decomposition import PCA           # Importing to run pca analysis on data

from sklearn.model_selection import KFold, cross_val_score           # Importing kfold for cross validation
from sklearn.model_selection import GridSearchCV, RandomizedSearchCV           # Importing for hypertuning model
from sklearn.cluster import KMeans              # For KMeans cluster model building
from scipy.stats import zscore       # Import zscore library
from scipy.spatial.distance import cdist        # Importing cdist functionality for elbow graph
import tensorflow           # Importing tensorflow library
from tensorflow.keras.models import Sequential             # Importing tensorflow library
from tensorflow.keras.utils import to_categorical          # Importing tensorflow library
from tensorflow.keras import optimizers                    # Importing optimizers
from tensorflow.keras.layers import Dense, Dropout, Activation       # Importing necessary libraries 

from skimage.color import rgb2gray              # Loading color library
from sklearn.preprocessing import OneHotEncoder            # Library for one hot encoding
from sklearn.metrics import confusion_matrix               # Loading necessary library
from tensorflow.keras.preprocessing.image import ImageDataGenerator             # Loading image generator 
from tensorflow import keras         # Loading keras libaray 

PART ONE // Regressor Model Building

1. Import Data.

In [12]:
df = pd.read_csv('Part- 1,2&3 - Signal.csv')
In [13]:
df.head()
Out[13]:
Parameter 1 Parameter 2 Parameter 3 Parameter 4 Parameter 5 Parameter 6 Parameter 7 Parameter 8 Parameter 9 Parameter 10 Parameter 11 Signal_Strength
0 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 5
1 7.8 0.88 0.00 2.6 0.098 25.0 67.0 0.9968 3.20 0.68 9.8 5
2 7.8 0.76 0.04 2.3 0.092 15.0 54.0 0.9970 3.26 0.65 9.8 5
3 11.2 0.28 0.56 1.9 0.075 17.0 60.0 0.9980 3.16 0.58 9.8 6
4 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 5
In [14]:
df.tail()
Out[14]:
Parameter 1 Parameter 2 Parameter 3 Parameter 4 Parameter 5 Parameter 6 Parameter 7 Parameter 8 Parameter 9 Parameter 10 Parameter 11 Signal_Strength
1594 6.2 0.600 0.08 2.0 0.090 32.0 44.0 0.99490 3.45 0.58 10.5 5
1595 5.9 0.550 0.10 2.2 0.062 39.0 51.0 0.99512 3.52 0.76 11.2 6
1596 6.3 0.510 0.13 2.3 0.076 29.0 40.0 0.99574 3.42 0.75 11.0 6
1597 5.9 0.645 0.12 2.0 0.075 32.0 44.0 0.99547 3.57 0.71 10.2 5
1598 6.0 0.310 0.47 3.6 0.067 18.0 42.0 0.99549 3.39 0.66 11.0 6
In [15]:
df.shape
Out[15]:
(1599, 12)
In [16]:
df.size
Out[16]:
19188
In [17]:
df.isnull().sum()
Out[17]:
Parameter 1        0
Parameter 2        0
Parameter 3        0
Parameter 4        0
Parameter 5        0
Parameter 6        0
Parameter 7        0
Parameter 8        0
Parameter 9        0
Parameter 10       0
Parameter 11       0
Signal_Strength    0
dtype: int64
In [18]:
df.dtypes
Out[18]:
Parameter 1        float64
Parameter 2        float64
Parameter 3        float64
Parameter 4        float64
Parameter 5        float64
Parameter 6        float64
Parameter 7        float64
Parameter 8        float64
Parameter 9        float64
Parameter 10       float64
Parameter 11       float64
Signal_Strength      int64
dtype: object
In [19]:
df.info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 1599 entries, 0 to 1598
Data columns (total 12 columns):
 #   Column           Non-Null Count  Dtype  
---  ------           --------------  -----  
 0   Parameter 1      1599 non-null   float64
 1   Parameter 2      1599 non-null   float64
 2   Parameter 3      1599 non-null   float64
 3   Parameter 4      1599 non-null   float64
 4   Parameter 5      1599 non-null   float64
 5   Parameter 6      1599 non-null   float64
 6   Parameter 7      1599 non-null   float64
 7   Parameter 8      1599 non-null   float64
 8   Parameter 9      1599 non-null   float64
 9   Parameter 10     1599 non-null   float64
 10  Parameter 11     1599 non-null   float64
 11  Signal_Strength  1599 non-null   int64  
dtypes: float64(11), int64(1)
memory usage: 150.0 KB

1.The dataset consists of (1599 entries & 12 columns).

2.On checking for lapses in the dataset we can conclude by saying that the data does not have any null values & does not have any major cleaning that needs to be done.

In [20]:
plt.figure(figsize=(14,9))
sns.boxplot(data = df, orient = 'h', palette = 'Set1', dodge = False);

Observation:

From the above boxplot we can see that there are outliers are present in mostly all columns. I will be finding the outliers counts in individual attributes analysis and fixing the outliers after visualization and analysis of each attribute.

2. Data Analysis & Visualisation.

Analysing each attribute with the help of plots.

A. Parameter 1

In [21]:
# Plotting a visual analysis of parameter 1

fig,(ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 1'], ax = ax1, color = 'red')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 1', fontsize = 15)

sns.boxplot(df['Parameter 1'], ax = ax2, color = 'red')
ax2.set_title('Box Plot', fontsize = 15)
ax2.set_xlabel('Parameter 1', fontsize = 15)
Out[21]:
Text(0.5, 0, 'Parameter 1')
In [22]:
# Checking outliers in parameter 1 

outliers_cols0 = []

Q1 = df['Parameter 1'].quantile(0.25)    # 1st Quartile
Q3 = df['Parameter 1'].quantile(0.75)    # 3rd Quartile

IQR = Q3 - Q1         # Interquartile range

LTV_para1 = Q1 - 1.5 * IQR               # Lower range bound
UTV_para1 = Q3 + 1.5 * IQR               # Upper range bound 


print('Interquartile range =', IQR)
print('Parameter 1 <', LTV_para1, 'and >', UTV_para1, 'are outliers')
print('Number of outliers in parameter 1 column below the lower whisker =', df[df['Parameter 1'] < (Q1 - (1.5*IQR))]['Parameter 1'].count())
print('Number of outliers in parameter 1 column above the upper whisker =', df[df['Parameter 1'] > (Q3 + (1.5*IQR))]['Parameter 1'].count())

outliers_cols0.append('Parameter 1')
upperLowerBound_Disct = {'Parameter 1' : UTV_para1}
Interquartile range = 2.0999999999999996
Parameter 1 < 3.95 and > 12.349999999999998 are outliers
Number of outliers in parameter 1 column below the lower whisker = 0
Number of outliers in parameter 1 column above the upper whisker = 49

Observation :

We can observe from the outlier analysis above that we have a total of 49 outliers in "Parameter 1" which is towards the upper whisker. We will treat them later on.

B. Parameter 2

In [23]:
# Plotting a visual analysis of parameter 2

fig,(ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 2'], ax = ax1, color = 'b')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 2', fontsize = 15)

sns.boxplot(df['Parameter 2'], ax = ax2, color = 'b')
ax2.set_title('Boxplot', fontsize = 15)
ax2.set_xlabel('Parameter 2', fontsize = 15)
Out[23]:
Text(0.5, 0, 'Parameter 2')
In [24]:
# Checking outliers in parameter 2

outliers_cols1 = []

Q1 = df['Parameter 2'].quantile(0.25)    # 1st Quartile
Q3 = df['Parameter 2'].quantile(0.75)    # 3rd Quartile

IQR = Q3 - Q1         # Interquartile range

LTV_para2 = Q1 - 1.5 * IQR               # Lower range bound
UTV_para2 = Q3 + 1.5 * IQR               # Upper range bound


print('Interquartile range =', IQR)
print('Parameter 2 <', LTV_para2, 'and >', UTV_para2, 'are outliers')
print('Number of outliers in the parameter 2 column below the lower whisker =', df[df['Parameter 2'] < (Q1 - (1.5*IQR))]['Parameter 2'].count())
print('Number of outliers in the parameter 2 column above the upper whisker =', df[df['Parameter 2'] > (Q3 + (1.5*IQR))]['Parameter 2'].count())

outliers_cols1.append('Parameter 2')
upperLowerBound_Disct = {'Parameter 2' : UTV_para2}
Interquartile range = 0.25
Parameter 2 < 0.015000000000000013 and > 1.0150000000000001 are outliers
Number of outliers in the parameter 2 column below the lower whisker = 0
Number of outliers in the parameter 2 column above the upper whisker = 19

Observation :

We can observe from the outlier analysis above that we have a total of 19 outliers in "Parameter 2" which is towards the upper whisker. We will treat them later on.

C. Parameter 3

In [25]:
# Plotting a visual analysis of parameter 3

fig,(ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 3'], ax = ax1, color = 'green')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 3', fontsize = 15)

sns.boxplot(df['Parameter 3'], ax = ax2, color = 'green')
ax2.set_title('Boxplot', fontsize = 15)
ax2.set_xlabel('Parameter 3', fontsize = 15)
Out[25]:
Text(0.5, 0, 'Parameter 3')
In [26]:
# Checking outliers in parameter 3

outliers_cols2 = []

Q1 = df['Parameter 3'].quantile(0.25)     # 1st Quartile
Q3 = df['Parameter 3'].quantile(0.75)     # 3rd Quartile

IQR = Q3 - Q1            # Interquartile range

LTV_para3 = Q1 - 1.5 * IQR                # Lower range bound
UTV_para3 = Q3 + 1.5 * IQR                # Upper range bound


print('Interquartile range = ', IQR)
print('Parameter 3 <', LTV_para3, 'and >', UTV_para3, 'are outliers')
print('Number of outliers in the parameter 3 column below the lower whisker =', df[df['Parameter 3'] < (Q1 - (1.5*IQR))]['Parameter 3'].count())
print('Number of outliers in the parameter 3 column above the upper whisker =', df[df['Parameter 3'] > (Q3 + (1.5*IQR))]['Parameter 3'].count())

outliers_cols2.append('Parameter 3')
upperLowerBound_Disct = {'Parameter 3' : UTV_para3}
Interquartile range =  0.32999999999999996
Parameter 3 < -0.4049999999999999 and > 0.9149999999999999 are outliers
Number of outliers in the parameter 3 column below the lower whisker = 0
Number of outliers in the parameter 3 column above the upper whisker = 1

Observation :

We can observe from the outlier analysis above that we have a total of 1 outlier in "Parameter 3" which is towards the upper whisker. We will treat them later on.

D. Parameter 4

In [27]:
# Plotting a visual analysis of parameter 4

fig,(ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 4'], ax = ax1, color = 'purple')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 4', fontsize = 15)

sns.boxplot(df['Parameter 4'], ax = ax2, color = 'purple')
ax2.set_title('Boxplot', fontsize = 15)
ax2.set_xlabel('Parameter 4', fontsize = 15)
Out[27]:
Text(0.5, 0, 'Parameter 4')
In [28]:
# Checking outliers in parameter 4

outliers_cols3 = []

Q1 = df['Parameter 4'].quantile(0.25)     # 1st Quartile
Q3 = df['Parameter 4'].quantile(0.75)     # 3rd Quartile

IQR = Q3 - Q1            # Interquartile range

LTV_para4 = Q1 - 1.5 * IQR                # Lower range bound
UTV_para4 = Q3 + 1.5 * IQR                # Upper range bound


print('Interquartile range = ', IQR)
print('Parameter 4 <', LTV_para4, 'and >', UTV_para4, 'are outliers')
print('Number of outliers in the parameter 4 column below the lower whisker =', df[df['Parameter 4'] < (Q1 - (1.5*IQR))]['Parameter 4'].count())
print('Number of outliers in the parameter 4 column above the upper whisker =', df[df['Parameter 4'] > (Q3 + (1.5*IQR))]['Parameter 4'].count())

outliers_cols3.append('Parameter 4')
upperLowerBound_Disct = {'Parameter 4' : UTV_para4}
Interquartile range =  0.7000000000000002
Parameter 4 < 0.8499999999999996 and > 3.6500000000000004 are outliers
Number of outliers in the parameter 4 column below the lower whisker = 0
Number of outliers in the parameter 4 column above the upper whisker = 155

Observation :

We can observe from the outlier analysis above that we have a total of 155 outliers in "Parameter 4" which is towards the upper whisker. We will treat them later on.

E. Parameter 5

In [29]:
# Plotting a visual analysis of parameter 5

fig,(ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 5'], ax = ax1, color = 'orange')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 5', fontsize = 15)

sns.boxplot(df['Parameter 5'], ax = ax2, color = 'orange')
ax2.set_title('Boxplot', fontsize = 15)
ax2.set_xlabel('Parameter 5', fontsize = 15)
Out[29]:
Text(0.5, 0, 'Parameter 5')
In [30]:
# Checking outliers in parameter 5

outliers_cols4 = []

Q1 = df['Parameter 5'].quantile(0.25)     # 1st Quartile
Q3 = df['Parameter 5'].quantile(0.75)     # 3rd Quartile

IQR = Q3 - Q1            # Interquartile range

LTV_para5 = Q1 - 1.5 * IQR                # Lower range bound
UTV_para5 = Q3 + 1.5 * IQR                # Upper range bound


print('Interquartile range = ', IQR)
print('Parameter 5 <', LTV_para5, 'and >', UTV_para5, 'are outliers')
print('Number of outliers in the parameter 5 column below the lower whisker =', df[df['Parameter 5'] < (Q1 - (1.5*IQR))]['Parameter 5'].count())
print('Number of outliers in the parameter 5 column above the upper whisker =', df[df['Parameter 5'] > (Q3 + (1.5*IQR))]['Parameter 5'].count())

outliers_cols4.append('Parameter 5')
upperLowerBound_Disct = {'Parameter 5' : UTV_para5}
Interquartile range =  0.01999999999999999
Parameter 5 < 0.04000000000000002 and > 0.11999999999999998 are outliers
Number of outliers in the parameter 5 column below the lower whisker = 9
Number of outliers in the parameter 5 column above the upper whisker = 103

Observation :

We can observe from the outlier analysis above that we have a total of 112 outlier in "Parameter 5", 9 towards the lower whisker and 103 towards the upper whisker. We will treat them later on.

F. Parameter 6

In [31]:
# Plotting a visual analysis of parameter 6

fig, (ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 6'], ax = ax1, color = 'black')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 6', fontsize = 15)

sns.boxplot(df['Parameter 6'], ax = ax2, color = 'black')
ax2.set_title('Boxplot', fontsize = 15)
ax2.set_xlabel('Parameter 6', fontsize = 15)
Out[31]:
Text(0.5, 0, 'Parameter 6')
In [32]:
# Checking outliers in parameter 6

outliers_cols5 = []

Q1 = df['Parameter 6'].quantile(0.25)     # 1st quartile
Q3 = df['Parameter 6'].quantile(0.75)     # 3rd quartile

IQR = Q3 - Q1          #Interquartile range

LTV_para6 = Q1 - 1.5 * IQR                # Lower range bound
UTV_para6 = Q3 + 1.5 * IQR                # Upper range bound


print('Interquartile range =', IQR)
print('Parameter 6 <', LTV_para6, ' and >', UTV_para6, 'are outliers')
print('Number of outliers in the parameter 6 column below the lower whisker =', df[df['Parameter 6'] < (Q1 - (1.5*IQR))]['Parameter 6'].count())
print('Number of outliers in the parameter 6 column above the upper whisker =', df[df['Parameter 6'] > (Q3 + (1.5*IQR))]['Parameter 6'].count())

outliers_cols5.append('Parameter 6')
upperLowerBand_Disct = {'Parameter 6' : UTV_para6}
Interquartile range = 14.0
Parameter 6 < -14.0  and > 42.0 are outliers
Number of outliers in the parameter 6 column below the lower whisker = 0
Number of outliers in the parameter 6 column above the upper whisker = 30

Observation :

We can observe from the outlier analysis above that we have a total of 30 outliers in "Parameter 6" which is towards the upper whisker. We will treat them later on.

G. Parameter 7

In [33]:
# Plotting a visual analysis of parameter 7

fig, (ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 7'], ax = ax1, color = 'brown')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 7', fontsize = 15)

sns.boxplot(df['Parameter 7'], ax = ax2, color = 'brown')
ax2.set_title('Boxplot', fontsize= 15)
ax2.set_xlabel('Parameter 7', fontsize = 15)
Out[33]:
Text(0.5, 0, 'Parameter 7')
In [34]:
# Checking outliers in parameter 7

outliers_cols6 = []

Q1 = df['Parameter 7'].quantile(0.25)      # 1st quartile
Q3 = df['Parameter 7'].quantile(0.75)      # 3rd quartile

IQR = Q3 - Q1            # Interquartile range

LTV_para7 = Q1 - 1.5 * IQR                 # Lower range bound
UTV_para7 = Q3 + 1.5 * IQR                 # Upper range bound


print('Interquartile range =', IQR)
print('Parameter 7 <', LTV_para7, 'and >', UTV_para7, 'are outliers')
print('Number of outliers in the parameter 7 column below the lower whisker =', df[df['Parameter 7'] < (Q1 - (1.5*IQR))]['Parameter 7'].count())
print('Number of outliers in the parameter 7 column above the upper whisker =', df[df['Parameter 7'] > (Q3 + (1.5*IQR))]['Parameter 7'].count())

outliers_cols6.append('Parameter 7')
upperLowerBound_Disct = {'Parameter 7' : UTV_para7}
Interquartile range = 40.0
Parameter 7 < -38.0 and > 122.0 are outliers
Number of outliers in the parameter 7 column below the lower whisker = 0
Number of outliers in the parameter 7 column above the upper whisker = 55

Observation :

We can observe from the outlier analysis above that we have a total of 55 outliers in "Parameter 6" which is towards the upper whisker. We will treat them later on.

H. Parameter 8

In [35]:
# Plotting a visual analysis of parameter 8

fig, (ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 8'], ax = ax1, color = 'pink')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 8', fontsize = 15)

sns.boxplot(df['Parameter 8'], ax = ax2, color = 'pink')
ax2.set_title('Boxplot', fontsize = 15)
ax2.set_xlabel('Parameter 8', fontsize = 15)
Out[35]:
Text(0.5, 0, 'Parameter 8')
In [36]:
# Checking outliers in parameter 8

outliers_cols7 = []

Q1 = df['Parameter 8'].quantile(0.25)     # 1st quartile
Q3 = df['Parameter 8'].quantile(0.75)     # 3rd quartile

IQR = Q3 - Q1           # Interquartile range

LTV_para8 = Q1 - 1.5 * IQR                # Lower range bound
UTV_para8 = Q3 + 1.5 * IQR                # Upper range bound


print('Interquartile range =', IQR)
print('Parameter 8 <', LTV_para8, 'and >', UTV_para8, 'are outliers')
print('Number of outliers in the parameter 8 column below the lower whisker =', df[df['Parameter 8'] < (Q1 - (1.5*IQR))]['Parameter 8'].count())
print('Number of outliers in the parameter 8 column below the upper whisker =', df[df['Parameter 8'] > (Q3 + (1.5*IQR))]['Parameter 8'].count())

outliers_cols7.append('Parameter 8')
upperLowerBound_Disct = {'Parameter 8' : UTV_para8}
Interquartile range = 0.002234999999999876
Parameter 8 < 0.9922475000000002 and > 1.0011874999999997 are outliers
Number of outliers in the parameter 8 column below the lower whisker = 21
Number of outliers in the parameter 8 column below the upper whisker = 24

Observation :

We can observe from the outlier analysis above that we have a total of 45 outlier in "Parameter 8", 21 towards the lower whisker and 24 towards the upper whisker. We will treat them later on.

I. Parameter 9

In [37]:
# Plotting a visual analysis of parameter 9

fig, (ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 9'], ax = ax1, color = 'grey')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 9', fontsize = 15)

sns.boxplot(df['Parameter 9'], ax = ax2, color = 'grey')
ax2.set_title('Boxplot', fontsize = 15)
ax2.set_xlabel('Parameter 9', fontsize = 15)
Out[37]:
Text(0.5, 0, 'Parameter 9')
In [38]:
# Checking outliers in parameter 9

outliers_cols8 = []

Q1 = df['Parameter 9'].quantile(0.25)     # 1st quartile
Q3 = df['Parameter 9'].quantile(0.75)     # 3rd quartile

IQR = Q3 - Q1           # Interquartile range

LTV_para9 = Q1 - 1.5 * IQR                # Lower range bound
UTV_para9 = Q3 + 1.5 * IQR                # Upper range bound


print('Interquartile range =', IQR)
print('Parameter 9 <', LTV_para9, 'and >', UTV_para9, 'are outliers')
print('Number of outliers in the parameter 9 column below the lower whisker =', df[df['Parameter 9'] < (Q1 - (1.5*IQR))]['Parameter 9'].count())
print('Number of outliers in the parameter 9 column below the upper whisker =', df[df['Parameter 9'] > (Q3 + (1.5*IQR))]['Parameter 9'].count())

outliers_cols8.append('Parameter 9')
upperLowerBound_Disct = {'Parameter 9' : UTV_para9}
Interquartile range = 0.18999999999999995
Parameter 9 < 2.925 and > 3.6849999999999996 are outliers
Number of outliers in the parameter 9 column below the lower whisker = 14
Number of outliers in the parameter 9 column below the upper whisker = 21

Observation :

We can observe from the outlier analysis above that we have a total of 35 outlier in "Parameter 9", 14 towards the lower whisker and 21 towards the upper whisker. We will treat them later on.

J. Parameter 10

In [39]:
# Plotting a visual analysis of parameter 10

fig, (ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 10'], ax = ax1, color = 'gold')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 10', fontsize = 15)

sns.boxplot(df['Parameter 10'], ax = ax2, color = 'gold')
ax2.set_title('Boxplot', fontsize = 15)
ax2.set_xlabel('Parameter 10', fontsize = 15)
Out[39]:
Text(0.5, 0, 'Parameter 10')
In [40]:
# Checking outliers in parameter 10

outliers_cols9 = []

Q1 = df['Parameter 10'].quantile(0.25)     # 1st quartile
Q3 = df['Parameter 10'].quantile(0.75)     # 3rd quartile

IQR = Q3 - Q1           # Interquartile range

LTV_para10 = Q1 - 1.5 * IQR                # Lower range bound
UTV_para10 = Q3 + 1.5 * IQR                # Upper range bound


print('Interquartile range =', IQR)
print('Parameter 10 <', LTV_para10, 'and >', UTV_para10, 'are outliers')
print('Number of outliers in the parameter 10 column below the lower whisker =', df[df['Parameter 10'] < (Q1 - (1.5*IQR))]['Parameter 10'].count())
print('Number of outliers in the parameter 10 column below the upper whisker =', df[df['Parameter 10'] > (Q3 + (1.5*IQR))]['Parameter 10'].count())

outliers_cols9.append('Parameter 10')
upperLowerBound_Disct = {'Parameter 10' : UTV_para10}
Interquartile range = 0.17999999999999994
Parameter 10 < 0.28000000000000014 and > 0.9999999999999999 are outliers
Number of outliers in the parameter 10 column below the lower whisker = 0
Number of outliers in the parameter 10 column below the upper whisker = 59

Observation :

We can observe from the outlier analysis above that we have a total of 59 outliers in "Parameter 10" which is towards the upper whisker. We will treat them later on.

K. Parameter 11

In [41]:
# Plotting a visual analysis of parameter 11

fig, (ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 11'], ax = ax1, color = 'white')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 11', fontsize = 15)

sns.boxplot(df['Parameter 11'], ax = ax2, color = 'white')
ax2.set_title('Boxplot', fontsize = 15)
ax2.set_xlabel('Parameter 11', fontsize = 15)
Out[41]:
Text(0.5, 0, 'Parameter 11')
In [42]:
# Checking outliers in parameter 11

outliers_cols10 = []

Q1 = df['Parameter 11'].quantile(0.25)     # 1st quartile
Q3 = df['Parameter 11'].quantile(0.75)     # 3rd quartile

IQR = Q3 - Q1           # Interquartile range

LTV_para11 = Q1 - 1.5 * IQR                # Lower range bound
UTV_para11 = Q3 + 1.5 * IQR                # Upper range bound


print('Interquartile range =', IQR)
print('Parameter 11 <', LTV_para11, 'and >', UTV_para11, 'are outliers')
print('Number of outliers in the parameter 11 column below the lower whisker =', df[df['Parameter 11'] < (Q1 - (1.5*IQR))]['Parameter 11'].count())
print('Number of outliers in the parameter 11 column below the upper whisker =', df[df['Parameter 11'] > (Q3 + (1.5*IQR))]['Parameter 11'].count())

outliers_cols10.append('Parameter 11')
upperLowerBound_Disct = {'Parameter 11' : UTV_para11}
Interquartile range = 1.5999999999999996
Parameter 11 < 7.1000000000000005 and > 13.5 are outliers
Number of outliers in the parameter 11 column below the lower whisker = 0
Number of outliers in the parameter 11 column below the upper whisker = 13

Observation :

We can observe from the outlier analysis above that we have a total of 13 outliers in "Parameter 11" which is towards the upper whisker. We will treat them later on.

L. Signal_Strength

In [43]:
# Plotting a visual analysis of signal_strength

fig, (ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Signal_Strength'], ax = ax1, color = 'yellow')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Signal_Strength', fontsize = 15)

sns.boxplot(df['Signal_Strength'], ax = ax2, color = 'yellow')
ax2.set_title('Boxplot', fontsize = 15)
ax2.set_xlabel('Signal_Strength', fontsize = 15)
Out[43]:
Text(0.5, 0, 'Signal_Strength')
In [44]:
# Checking outliers in signal_strength

outliers_cols11 = []

Q1 = df['Signal_Strength'].quantile(0.25)     # 1st quartile
Q3 = df['Signal_Strength'].quantile(0.75)     # 3rd quartile

IQR = Q3 - Q1           # Interquartile range

LTV_para12 = Q1 - 1.5 * IQR                # Lower range bound
UTV_para12 = Q3 + 1.5 * IQR                # Upper range bound


print('Interquartile range =', IQR)
print('Signal_Strength <', LTV_para12, 'and >', UTV_para12, 'are outliers')
print('Number of outliers in the Signal_Strength column below the lower whisker =', df[df['Signal_Strength'] < (Q1 - (1.5*IQR))]['Signal_Strength'].count())
print('Number of outliers in the Signal_Strength column below the upper whisker =', df[df['Signal_Strength'] > (Q3 + (1.5*IQR))]['Signal_Strength'].count())

outliers_cols11.append('Signal_Strength')
upperLowerBound_Disct = {'Signal_Strength' : UTV_para12}
Interquartile range = 1.0
Signal_Strength < 3.5 and > 7.5 are outliers
Number of outliers in the Signal_Strength column below the lower whisker = 10
Number of outliers in the Signal_Strength column below the upper whisker = 18

Observation :

We can observe from the outlier analysis above that we have a total of 28 outlier in "Signal_Strength", 10 towards the lower whisker and 18 towards the upper whisker. We will treat them later on.

In [45]:
# Pairplot visual analysis to check correlation amongst different fields

sns.pairplot(df, diag_kind = 'kde');
In [46]:
df.corr()

plt.figure(figsize = (18,12))
sns.heatmap(df.corr(), annot = True, fmt = 'g');

Observation : From the above pair plot & heatmap we can infer the relationship amongst the attributes and target column as follows:

  1. We have some strong negative correlations between (Parameter 1 & Parameter 9) (-0.68), (Parameter 3 & Parameter 9) (-0.54), (Parameter 2 & Parameter 3) (-0.55), (Parameter 8 & Parameter 11) (-0.49),
  1. Signal_Strength have some positive linear relation with (Parameter 11 that means if the quality of Parameter 11 goes up Signal_Strength will be more).
  1. We also have positive correlations between (Parameter 1 & Parameter 3) (0.67), (Parameter 1 & Parameter 8) (0.66), (Parameter 7 & Parameter 6) (0.66).
  1. Also it is quite visible that there are multiple gaussians in Parameter 3.
  1. Rest of the relations between other individual attributes are mostly cloud shaped or symmetrical shaped.

Pick one strategy to address the presence outliers and missing values and perform necessary imputation

------------------------ Fixing Outliers ------------------------

  • As we have seen above outlier are present in the given dataset.
  • There are multiple ways to deal with outliers but I mostly prefer either to drop the outliers or repalce it with median/mean.
  • Here I am going to replace the outliers with median because if we drop them, there may be chance to loose some important information which we dont want to as it could hamper our overall anylysis.
In [47]:
# Showing the columns where outliers exist

print('These are the columns which have outliers : \n\n', outliers_cols0, outliers_cols1, outliers_cols3, outliers_cols4, outliers_cols5, outliers_cols6, outliers_cols7, outliers_cols8, outliers_cols9, outliers_cols10, outliers_cols11)
These are the columns which have outliers : 

 ['Parameter 1'] ['Parameter 2'] ['Parameter 4'] ['Parameter 5'] ['Parameter 6'] ['Parameter 7'] ['Parameter 8'] ['Parameter 9'] ['Parameter 10'] ['Parameter 11'] ['Signal_Strength']
In [48]:
df_new = df.copy()
In [49]:
# Treating outliers present in respective columns

for col_name in df_new.columns[:]:
    q1 = df_new[col_name].quantile(0.25)
    q3 = df_new[col_name].quantile(0.75)
    iqr = q3 - q1
    low = q1 - 1.5*iqr
    high = q3 + 1.5*iqr
    
    df_new.loc[(df_new[col_name] < low) | (df_new[col_name] > high), col_name] = df_new[col_name].median()
In [50]:
plt.figure(figsize=(15,8))
sns.boxplot(data = df_new, orient='h', palette='Set1', dodge=False);

Observation :

Now we can see from the above visual analysis of boxplots that most of the outliers are replaced with their median. We can see that most of the outliers are removed, but because of their gaussians replacing it with median values, the attributes raised with new outliers which we can ignore.

In [51]:
df_new.shape
Out[51]:
(1599, 12)
In [52]:
df_new.size
Out[52]:
19188
In [53]:
df_new.head()
Out[53]:
Parameter 1 Parameter 2 Parameter 3 Parameter 4 Parameter 5 Parameter 6 Parameter 7 Parameter 8 Parameter 9 Parameter 10 Parameter 11 Signal_Strength
0 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 5.0
1 7.8 0.88 0.00 2.6 0.098 25.0 67.0 0.9968 3.20 0.68 9.8 5.0
2 7.8 0.76 0.04 2.3 0.092 15.0 54.0 0.9970 3.26 0.65 9.8 5.0
3 11.2 0.28 0.56 1.9 0.075 17.0 60.0 0.9980 3.16 0.58 9.8 6.0
4 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 5.0
In [54]:
df_new.count(axis = 0)
Out[54]:
Parameter 1        1599
Parameter 2        1599
Parameter 3        1599
Parameter 4        1599
Parameter 5        1599
Parameter 6        1599
Parameter 7        1599
Parameter 8        1599
Parameter 9        1599
Parameter 10       1599
Parameter 11       1599
Signal_Strength    1599
dtype: int64

Feature Engineering Techniques

Creating Composite Features, ( "Parameter 2,3,9,10" ratio by dividing the values of "Parameter 9 with 2,3,10" )

In [55]:
df_new.insert(df_new.shape[-1]-1,'Parameter 2,3,9 & 10',df_new['Parameter 9']/(df_new['Parameter 2'] + df_new['Parameter 3'] + df_new['Parameter 10']))
In [56]:
df_new.head()
Out[56]:
Parameter 1 Parameter 2 Parameter 3 Parameter 4 Parameter 5 Parameter 6 Parameter 7 Parameter 8 Parameter 9 Parameter 10 Parameter 11 Parameter 2,3,9 & 10 Signal_Strength
0 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 2.785714 5.0
1 7.8 0.88 0.00 2.6 0.098 25.0 67.0 0.9968 3.20 0.68 9.8 2.051282 5.0
2 7.8 0.76 0.04 2.3 0.092 15.0 54.0 0.9970 3.26 0.65 9.8 2.248276 5.0
3 11.2 0.28 0.56 1.9 0.075 17.0 60.0 0.9980 3.16 0.58 9.8 2.225352 6.0
4 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 2.785714 5.0

Creating Composite Features, ( "Parameter 1 & 11" ratio by dividing the values of "Parameter 11 with 1" )

In [57]:
df_new.insert(df_new.shape[-1]-1, 'Parameter 1 & 11', df_new['Parameter 11']/df_new['Parameter 1'])
In [58]:
df_new.head()
Out[58]:
Parameter 1 Parameter 2 Parameter 3 Parameter 4 Parameter 5 Parameter 6 Parameter 7 Parameter 8 Parameter 9 Parameter 10 Parameter 11 Parameter 2,3,9 & 10 Parameter 1 & 11 Signal_Strength
0 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 2.785714 1.27027 5.0
1 7.8 0.88 0.00 2.6 0.098 25.0 67.0 0.9968 3.20 0.68 9.8 2.051282 1.25641 5.0
2 7.8 0.76 0.04 2.3 0.092 15.0 54.0 0.9970 3.26 0.65 9.8 2.248276 1.25641 5.0
3 11.2 0.28 0.56 1.9 0.075 17.0 60.0 0.9980 3.16 0.58 9.8 2.225352 0.87500 6.0
4 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 2.785714 1.27027 5.0

Creating Composite Features, ( "Parameter 4,5 & 8" ratio by dividing the values of "Parameter 4 with 5, 8" )

In [59]:
df_new.insert(df_new.shape[-1]-1,'Parameter 4,5 & 8',df_new['Parameter 4']/(df_new['Parameter 8'] + df_new['Parameter 5']))
In [60]:
df_new.head()
Out[60]:
Parameter 1 Parameter 2 Parameter 3 Parameter 4 Parameter 5 Parameter 6 Parameter 7 Parameter 8 Parameter 9 Parameter 10 Parameter 11 Parameter 2,3,9 & 10 Parameter 1 & 11 Parameter 4,5 & 8 Signal_Strength
0 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 2.785714 1.27027 1.769417 5.0
1 7.8 0.88 0.00 2.6 0.098 25.0 67.0 0.9968 3.20 0.68 9.8 2.051282 1.25641 2.374863 5.0
2 7.8 0.76 0.04 2.3 0.092 15.0 54.0 0.9970 3.26 0.65 9.8 2.248276 1.25641 2.112029 5.0
3 11.2 0.28 0.56 1.9 0.075 17.0 60.0 0.9980 3.16 0.58 9.8 2.225352 0.87500 1.770736 6.0
4 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 2.785714 1.27027 1.769417 5.0
In [61]:
df_new.shape
Out[61]:
(1599, 15)
In [62]:
df_new.isnull().sum()
Out[62]:
Parameter 1             0
Parameter 2             0
Parameter 3             0
Parameter 4             0
Parameter 5             0
Parameter 6             0
Parameter 7             0
Parameter 8             0
Parameter 9             0
Parameter 10            0
Parameter 11            0
Parameter 2,3,9 & 10    0
Parameter 1 & 11        0
Parameter 4,5 & 8       0
Signal_Strength         0
dtype: int64
In [63]:
df_new = df_new.apply(zscore)
df_new.head()
Out[63]:
Parameter 1 Parameter 2 Parameter 3 Parameter 4 Parameter 5 Parameter 6 Parameter 7 Parameter 8 Parameter 9 Parameter 10 Parameter 11 Parameter 2,3,9 & 10 Parameter 1 & 11 Parameter 4,5 & 8 Signal_Strength
0 -0.506257 1.084072 -1.395226 -0.660402 -0.193503 -0.455657 -0.329932 0.648369 1.448448 -0.638601 -0.976125 1.066110 -0.167079 -0.661503 -0.854340
1 -0.237876 2.171450 -1.395226 0.980050 1.341692 1.076386 0.903736 0.039563 -0.783248 0.372245 -0.582843 -0.829895 -0.215210 0.889886 -0.854340
2 -0.237876 1.446531 -1.188918 0.276999 0.923002 -0.017931 0.417746 0.161324 -0.351307 0.119534 -0.582843 -0.321337 -0.215210 0.216403 -0.854340
3 2.043366 -1.453143 1.493090 -0.660402 -0.263285 0.200933 0.642049 0.770130 -1.071209 -0.470127 -0.582843 -0.380517 -1.539705 -0.658123 0.496886
4 -0.506257 1.084072 -1.395226 -0.660402 -0.193503 -0.455657 -0.329932 0.648369 1.448448 -0.638601 -0.976125 1.066110 -0.167079 -0.661503 -0.854340

Splitting X-independent attributes and Y-dependent attributes and keeping the test set seperate

In [64]:
x = df_new.drop(['Signal_Strength'], axis = 1)
y = df_new[['Signal_Strength']]
In [65]:
pca_model = PCA(n_components = 14)
pca_model.fit(x)
plt.step(list(range(1,15)), np.cumsum(pca_model.explained_variance_ratio_), where = 'mid')
plt.ylabel('Cummulation of Variance Explained')
plt.xlabel('Eigen Values')
plt.show()
np.cumsum(pca_model.explained_variance_ratio_)
Out[65]:
array([0.30192231, 0.45883703, 0.59756174, 0.70665676, 0.77984946,
       0.83966866, 0.89180671, 0.92629421, 0.95673865, 0.98038117,
       0.99702532, 0.99891844, 0.99999315, 1.        ])
In [66]:
cluster = range(1,10)
mean_distortions = []
for val in cluster:
    kmeans = KMeans(n_clusters = val)
    kmeans.fit(df_new)
    mean_distortions.append(sum(np.min(cdist(df_new, kmeans.cluster_centers_), axis = 1))/df_new.shape[0])
In [67]:
plt.plot(cluster, mean_distortions,'bx-')
plt.xlabel('No. Of Clusters')
plt.ylabel('Distortion')
plt.title('Elbow Method')
Out[67]:
Text(0.5, 1.0, 'Elbow Method')
In [68]:
X_train1, X_test1, y_train, y_test = train_test_split(x, y, test_size=0.33, random_state=42)      # Splitting Data
In [69]:
from sklearn.preprocessing import StandardScaler          # Importing standard scaler library

X_train_sd = StandardScaler().fit_transform(X_train1)
X_test_sd = StandardScaler().fit_transform(X_test1)
In [70]:
# generating the covariance matrix and the eigen values for the PCA analysis
cov_matrix = np.cov(X_train_sd.T) # the relevanat covariance matrix
print('Covariance Matrix \n%s', cov_matrix)

#generating the eigen values and the eigen vectors
e_vals, e_vecs = np.linalg.eig(cov_matrix)
print('Eigenvectors \n%s' %e_vecs)
print('\nEigenvalues \n%s' %e_vals)
Covariance Matrix 
%s [[ 1.00093458e+00 -2.32410251e-01  5.99138340e-01  1.81366779e-01
   2.00770241e-01 -1.38106617e-01 -1.17948996e-01  5.45473970e-01
  -5.96016295e-01  1.51494592e-01 -4.07013904e-02 -5.60151049e-01
  -8.30272450e-01  1.65313015e-01]
 [-2.32410251e-01  1.00093458e+00 -5.59814698e-01  1.07922524e-02
   1.59415350e-01 -8.04582955e-03  8.01717410e-02 -6.46099117e-03
   1.90831513e-01 -3.16557532e-01 -2.20440731e-01 -4.42435372e-02
   7.51556530e-02  6.40629135e-04]
 [ 5.99138340e-01 -5.59814698e-01  1.00093458e+00  1.63410088e-01
   1.04595515e-01 -6.17874488e-02  6.47931774e-03  3.53543851e-01
  -5.06249440e-01  2.58523442e-01  1.32810983e-01 -6.73142800e-01
  -4.22172072e-01  1.55926053e-01]
 [ 1.81366779e-01  1.07922524e-02  1.63410088e-01  1.00093458e+00
   1.92466826e-01  1.45692815e-02  9.29586456e-02  3.59678476e-01
  -5.43814621e-02  5.42385359e-02  1.15811577e-01 -1.99847489e-01
  -1.06104282e-01  9.98448173e-01]
 [ 2.00770241e-01  1.59415350e-01  1.04595515e-01  1.92466826e-01
   1.00093458e+00 -1.72335571e-02  9.76281709e-02  3.46890200e-01
  -2.09875934e-01 -1.05024895e-01 -2.46927419e-01 -2.05274060e-01
  -3.24443172e-01  1.24685907e-01]
 [-1.38106617e-01 -8.04582955e-03 -6.17874488e-02  1.45692815e-02
  -1.72335571e-02  1.00093458e+00  6.04988594e-01 -1.08082109e-02
   7.98670247e-02  9.95177803e-03 -8.69243249e-02  7.49591853e-02
   6.98448118e-02  1.49663849e-02]
 [-1.17948996e-01  8.01717410e-02  6.47931774e-03  9.29586456e-02
   9.76281709e-02  6.04988594e-01  1.00093458e+00  1.20614180e-01
  -9.86908423e-03 -5.43373209e-02 -2.43726232e-01 -3.64337955e-02
  -8.16826311e-03  8.58413357e-02]
 [ 5.45473970e-01 -6.46099117e-03  3.53543851e-01  3.59678476e-01
   3.46890200e-01 -1.08082109e-02  1.20614180e-01  1.00093458e+00
  -2.60423216e-01  9.55323926e-02 -4.28426800e-01 -4.05307560e-01
  -6.58210784e-01  3.31662704e-01]
 [-5.96016295e-01  1.90831513e-01 -5.06249440e-01 -5.43814621e-02
  -2.09875934e-01  7.98670247e-02 -9.86908423e-03 -2.60423216e-01
   1.00093458e+00 -4.53678069e-03  1.36286469e-01  5.24025642e-01
   5.65431884e-01 -3.89333917e-02]
 [ 1.51494592e-01 -3.16557532e-01  2.58523442e-01  5.42385359e-02
  -1.05024895e-01  9.95177803e-03 -5.43373209e-02  9.55323926e-02
  -4.53678069e-03  1.00093458e+00  2.34612742e-01 -4.98927827e-01
  -1.77912190e-03  6.16802307e-02]
 [-4.07013904e-02 -2.20440731e-01  1.32810983e-01  1.15811577e-01
  -2.46927419e-01 -8.69243249e-02 -2.43726232e-01 -4.28426800e-01
   1.36286469e-01  2.34612742e-01  1.00093458e+00 -2.30174036e-02
   5.37935798e-01  1.37429549e-01]
 [-5.60151049e-01 -4.42435372e-02 -6.73142800e-01 -1.99847489e-01
  -2.05274060e-01  7.49591853e-02 -3.64337955e-02 -4.05307560e-01
   5.24025642e-01 -4.98927827e-01 -2.30174036e-02  1.00093458e+00
   4.52607989e-01 -1.86357839e-01]
 [-8.30272450e-01  7.51556530e-02 -4.22172072e-01 -1.06104282e-01
  -3.24443172e-01  6.98448118e-02 -8.16826311e-03 -6.58210784e-01
   5.65431884e-01 -1.77912190e-03  5.37935798e-01  4.52607989e-01
   1.00093458e+00 -8.01744441e-02]
 [ 1.65313015e-01  6.40629135e-04  1.55926053e-01  9.98448173e-01
   1.24685907e-01  1.49663849e-02  8.58413357e-02  3.31662704e-01
  -3.89333917e-02  6.16802307e-02  1.37429549e-01 -1.86357839e-01
  -8.01744441e-02  1.00093458e+00]]
Eigenvectors 
[[-4.21104370e-01  8.22452598e-02  1.28079100e-01 -6.84139299e-02
   1.11667098e-01  1.67205110e-02 -1.75744193e-01  5.08077278e-01
   7.25870052e-03 -3.31146817e-01  1.58787734e-01 -5.96996726e-01
  -4.13488723e-02  2.27604449e-03]
 [ 1.17233892e-01 -3.99690771e-01 -1.75907921e-02 -2.61333660e-01
  -5.59458407e-01  2.48971968e-01 -3.65615882e-01  1.39045605e-01
   1.39823898e-01  4.20102868e-02  9.83917079e-03 -3.38070557e-02
   4.55358020e-01  1.77282701e-03]
 [-3.64053797e-01  2.87910085e-01  2.04928987e-02  1.88562334e-01
   1.46556144e-01  1.39645284e-01  1.09587649e-01 -2.07592397e-01
   4.89763651e-01  3.02904163e-01  2.09721105e-01 -4.09751547e-02
   5.24623087e-01  1.63852616e-03]
 [-1.96505160e-01 -1.01110662e-01 -6.12901010e-01 -1.51518591e-01
   1.12117176e-01 -3.35694601e-03 -4.78724443e-02 -7.46302574e-02
  -1.26401829e-01  3.19726093e-02  9.38757925e-02 -1.57397991e-02
   1.05775147e-02 -7.11295922e-01]
 [-1.85989690e-01 -2.96053196e-01  7.16183470e-03 -1.35520717e-01
  -1.58536594e-01  3.30838233e-01  8.32248451e-01  1.17176031e-01
  -1.05989846e-01 -2.55749548e-02  5.82050399e-02 -2.95317652e-02
  -1.42706500e-03  4.73051218e-02]
 [ 4.62487028e-02 -2.29066656e-01 -1.19030397e-01  6.39860437e-01
   5.40799654e-02  7.05060478e-02 -5.77490869e-02  4.74902779e-01
  -1.88541518e-01  4.98326260e-01 -2.98175828e-02 -1.62481845e-02
  -2.48017711e-03  5.90887255e-04]
 [-1.56334691e-02 -3.36194502e-01 -1.23076153e-01  5.85672888e-01
  -8.00949332e-03  1.35132954e-01 -4.01967656e-02 -2.57954320e-01
   2.06835438e-01 -6.30252401e-01  3.63769275e-02  2.76338418e-02
  -1.03716785e-03  3.59558031e-04]
 [-3.57765490e-01 -2.46653088e-01 -3.84703220e-02 -4.97274397e-02
  -2.25713684e-02 -4.13459744e-01  6.80580133e-02  9.10502130e-02
   3.47463126e-01  4.73317263e-02 -7.04907195e-01  4.94693148e-02
  -2.14285057e-02  7.83583505e-03]
 [ 3.35862467e-01 -4.59758821e-02 -2.02458627e-01 -3.08808139e-02
  -1.31208013e-01 -4.70466652e-01  2.09962392e-01  2.85359204e-01
   5.07190290e-01 -9.67462312e-03  4.49785169e-01  1.00623381e-03
  -1.39379337e-01 -1.05218515e-03]
 [-1.26035267e-01  3.55427307e-01 -1.05748588e-01  2.44183019e-01
  -5.33095488e-01 -4.28429531e-01  1.54758553e-01 -2.05160847e-02
  -4.02032548e-01 -1.59933608e-01 -1.20937347e-02 -2.95387824e-02
   3.26550509e-01  1.29849855e-03]
 [ 1.02116104e-01  4.87349977e-01 -2.79940170e-01 -6.28621939e-03
  -3.89723715e-02  3.84697880e-01  2.22269281e-02  4.50026871e-01
   1.85213315e-01 -2.54158882e-01 -2.69716474e-01  3.86486248e-01
   1.03658616e-02  6.19552503e-05]
 [ 3.70274067e-01 -1.38209595e-01  5.51273557e-03 -9.49427003e-02
   5.30311137e-01 -1.80804540e-01  1.49243130e-01  1.59127928e-01
  -1.64653117e-01 -2.09233743e-01 -1.26307781e-01 -4.49981867e-02
   6.19961271e-01  3.93007166e-03]
 [ 4.03560494e-01  1.86875189e-01 -2.44597053e-01  6.85318480e-02
  -1.11513845e-01  1.68935504e-01  1.30392231e-01 -2.06512162e-01
   1.35924207e-01  1.09988229e-01 -3.43641982e-01 -6.95567471e-01
  -6.77170569e-02  2.44760011e-03]
 [-1.83646618e-01 -8.00457643e-02 -6.21268542e-01 -1.44598075e-01
   1.23552601e-01 -2.17650107e-02 -1.05420916e-01 -8.61518212e-02
  -1.24480068e-01  3.48849063e-02  1.00778421e-01 -9.70576076e-03
   4.77184953e-03  7.01229789e-01]]

Eigenvalues 
[4.12202279e+00 2.15358695e+00 2.01552718e+00 1.54934624e+00
 1.02318497e+00 8.61571440e-01 7.33545638e-01 4.97825513e-01
 4.45780873e-01 3.14693127e-01 2.52704108e-01 2.80666963e-02
 1.51377450e-02 9.08350207e-05]
In [71]:
# The "cumulative variance explained" analysis 
tot = sum(e_vals)
var_exp = [( i /tot ) * 100 for i in sorted(e_vals, reverse=True)]
cum_var_exp = np.cumsum(var_exp)
print("Cumulative Variance Explained", cum_var_exp)
Cumulative Variance Explained [ 29.41552882  44.78392973  59.16711025  70.22353598  77.52517611
  83.67351172  88.90823116  92.46080752  95.64198355  97.8876928
  99.69103678  99.89132599  99.99935178 100.        ]
In [72]:
# Plotting the variance expalained by the principal components and the cumulative variance explained.
plt.figure(figsize=(10 , 5))
plt.bar(range(1, e_vals.size + 1), var_exp, alpha = 0.5, align = 'center', label = 'Individual explained variance')
plt.step(range(1, e_vals.size + 1), cum_var_exp, where='mid', label = 'Cumulative explained variance')
plt.ylabel('Explained Variance Ratio')
plt.xlabel('Principal Components')
plt.legend(loc = 'best')
plt.tight_layout()
plt.show()

Observation :

We can notice through pca that 11 parameters explain 95%-97% of the data.

In [73]:
# Values of eigen pairs

eigen_pairs = [(np.abs(e_vals[i]), e_vecs[:,i]) for i in range(len(e_vals))]
eigen_pairs.sort(reverse=True)
eigen_pairs[:14]
Out[73]:
[(4.122022794950574,
  array([-0.42110437,  0.11723389, -0.3640538 , -0.19650516, -0.18598969,
          0.0462487 , -0.01563347, -0.35776549,  0.33586247, -0.12603527,
          0.1021161 ,  0.37027407,  0.40356049, -0.18364662])),
 (2.1535869471343827,
  array([ 0.08224526, -0.39969077,  0.28791009, -0.10111066, -0.2960532 ,
         -0.22906666, -0.3361945 , -0.24665309, -0.04597588,  0.35542731,
          0.48734998, -0.1382096 ,  0.18687519, -0.08004576])),
 (2.015527184177692,
  array([ 0.1280791 , -0.01759079,  0.0204929 , -0.61290101,  0.00716183,
         -0.1190304 , -0.12307615, -0.03847032, -0.20245863, -0.10574859,
         -0.27994017,  0.00551274, -0.24459705, -0.62126854])),
 (1.5493462377287603,
  array([-0.06841393, -0.26133366,  0.18856233, -0.15151859, -0.13552072,
          0.63986044,  0.58567289, -0.04972744, -0.03088081,  0.24418302,
         -0.00628622, -0.0949427 ,  0.06853185, -0.14459807])),
 (1.0231849727991564,
  array([ 0.1116671 , -0.55945841,  0.14655614,  0.11211718, -0.15853659,
          0.05407997, -0.00800949, -0.02257137, -0.13120801, -0.53309549,
         -0.03897237,  0.53031114, -0.11151385,  0.1235526 ])),
 (0.8615714400645843,
  array([ 0.01672051,  0.24897197,  0.13964528, -0.00335695,  0.33083823,
          0.07050605,  0.13513295, -0.41345974, -0.47046665, -0.42842953,
          0.38469788, -0.18080454,  0.1689355 , -0.02176501])),
 (0.7335456384355692,
  array([-0.17574419, -0.36561588,  0.10958765, -0.04787244,  0.83224845,
         -0.05774909, -0.04019677,  0.06805801,  0.20996239,  0.15475855,
          0.02222693,  0.14924313,  0.13039223, -0.10542092])),
 (0.49782551314479145,
  array([ 0.50807728,  0.13904561, -0.2075924 , -0.07463026,  0.11717603,
          0.47490278, -0.25795432,  0.09105021,  0.2853592 , -0.02051608,
          0.45002687,  0.15912793, -0.20651216, -0.08615182])),
 (0.44578087263919053,
  array([ 0.0072587 ,  0.1398239 ,  0.48976365, -0.12640183, -0.10598985,
         -0.18854152,  0.20683544,  0.34746313,  0.50719029, -0.40203255,
          0.18521332, -0.16465312,  0.13592421, -0.12448007])),
 (0.31469312652984377,
  array([-0.33114682,  0.04201029,  0.30290416,  0.03197261, -0.02557495,
          0.49832626, -0.6302524 ,  0.04733173, -0.00967462, -0.15993361,
         -0.25415888, -0.20923374,  0.10998823,  0.03488491])),
 (0.25270410822569783,
  array([ 0.15878773,  0.00983917,  0.20972111,  0.09387579,  0.05820504,
         -0.02981758,  0.03637693, -0.7049072 ,  0.44978517, -0.01209373,
         -0.26971647, -0.12630778, -0.34364198,  0.10077842])),
 (0.028066696331738465,
  array([-0.59699673, -0.03380706, -0.04097515, -0.0157398 , -0.02953177,
         -0.01624818,  0.02763384,  0.04946931,  0.00100623, -0.02953878,
          0.38648625, -0.04499819, -0.69556747, -0.00970576])),
 (0.015137744966902528,
  array([-0.04134887,  0.45535802,  0.52462309,  0.01057751, -0.00142707,
         -0.00248018, -0.00103717, -0.02142851, -0.13937934,  0.32655051,
          0.01036586,  0.61996127, -0.06771706,  0.00477185])),
 (9.083502065307332e-05,
  array([ 2.27604449e-03,  1.77282701e-03,  1.63852616e-03, -7.11295922e-01,
          4.73051218e-02,  5.90887255e-04,  3.59558031e-04,  7.83583505e-03,
         -1.05218515e-03,  1.29849855e-03,  6.19552503e-05,  3.93007166e-03,
          2.44760011e-03,  7.01229789e-01]))]
In [74]:
# Generating dimensionally reduced datasets

w = np.hstack((eigen_pairs[0][1].reshape(14,1), 
                      eigen_pairs[1][1].reshape(14,1)))
print('Matrix W:\n', w)
X_sd_pca = X_train_sd.dot(w)
X_test_sd_pca = X_test_sd.dot(w)
Matrix W:
 [[-0.42110437  0.08224526]
 [ 0.11723389 -0.39969077]
 [-0.3640538   0.28791009]
 [-0.19650516 -0.10111066]
 [-0.18598969 -0.2960532 ]
 [ 0.0462487  -0.22906666]
 [-0.01563347 -0.3361945 ]
 [-0.35776549 -0.24665309]
 [ 0.33586247 -0.04597588]
 [-0.12603527  0.35542731]
 [ 0.1021161   0.48734998]
 [ 0.37027407 -0.1382096 ]
 [ 0.40356049  0.18687519]
 [-0.18364662 -0.08004576]]
In [75]:
X_train_sd.shape, w.shape, X_sd_pca.shape, X_test_sd_pca.shape
Out[75]:
((1071, 14), (14, 2), (1071, 2), (528, 2))
In [76]:
X_sd_pca, X_train_sd
Out[76]:
(array([[-2.37577805,  0.11579088],
        [ 3.07041545,  0.53924261],
        [ 1.65303526, -2.31916528],
        ...,
        [ 1.08962381, -2.34631815],
        [ 1.24284746,  3.83430216],
        [ 3.85828245,  3.25440715]]),
 array([[-1.67185350e-01, -1.06169860e+00,  1.13839302e+00, ...,
         -1.05859058e+00, -3.01152034e-03,  9.95481094e-01],
        [-9.87286922e-01,  1.35318727e+00, -1.33018202e+00, ...,
          5.74394620e-01,  2.06699217e+00,  5.44003432e-01],
        [-1.05562872e+00,  6.28721512e-01, -1.38161067e+00, ...,
          1.53948068e+00,  4.25416116e-01, -8.93267766e-03],
        ...,
        [-6.45577934e-01,  5.68349365e-01, -1.07303879e+00, ...,
          1.31642642e+00,  7.40397212e-03,  1.25794613e+00],
        [-1.67185350e-01, -1.96728080e+00,  4.18391970e-01, ...,
          2.31736429e-01,  6.58678589e-01, -1.03844031e+00],
        [-1.60236310e+00, -1.42393148e+00, -4.44658515e-02, ...,
          1.92146479e+00,  3.52067845e+00, -1.08244993e+00]]))
In [77]:
y_train
Out[77]:
Signal_Strength
548 0.496886
355 0.496886
1296 -0.854340
209 1.848112
140 -0.854340
... ...
1130 0.496886
1294 0.496886
860 -0.854340
1459 1.848112
1126 0.496886

1071 rows × 1 columns

I ] Neural Network Models before PCA

A. -------------------SGD Optimizer--------------------

In [78]:
# SGD Neural Network regression model before pca

reg_model = Sequential()

# Input Layer

reg_model.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Output Layer

reg_model.add(Dense(1, kernel_initializer = 'normal', activation = 'softmax'))

sgd = optimizers.SGD(lr = 0.01)
reg_model.compile(optimizer = sgd, loss = 'mean_squared_error', metrics = ['mean_absolute_error'])
In [79]:
reg_model.summary()
Model: "sequential"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense (Dense)                (None, 9)                 135       
_________________________________________________________________
dense_1 (Dense)              (None, 1)                 10        
=================================================================
Total params: 145
Trainable params: 145
Non-trainable params: 0
_________________________________________________________________
In [80]:
history = reg_model.fit(X_train_sd, y_train, epochs = 100, verbose = 1)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 1.9642 - mean_absolute_error: 1.2080
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0500 - mean_absolute_error: 1.2275
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0118 - mean_absolute_error: 1.2213
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0871 - mean_absolute_error: 1.2381
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9707 - mean_absolute_error: 1.2133
Epoch 6/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9127 - mean_absolute_error: 1.1915
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9486 - mean_absolute_error: 1.1961
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0296 - mean_absolute_error: 1.2279
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0603 - mean_absolute_error: 1.2289
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0210 - mean_absolute_error: 1.2221
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9166 - mean_absolute_error: 1.1872
Epoch 12/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0718 - mean_absolute_error: 1.2377
Epoch 13/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9053 - mean_absolute_error: 1.1882
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0608 - mean_absolute_error: 1.2298
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1178 - mean_absolute_error: 1.2614
Epoch 16/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9684 - mean_absolute_error: 1.2077
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9947 - mean_absolute_error: 1.2144
Epoch 18/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1032 - mean_absolute_error: 1.2420
Epoch 19/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0105 - mean_absolute_error: 1.2150
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0380 - mean_absolute_error: 1.2218
Epoch 21/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0226 - mean_absolute_error: 1.2202
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8952 - mean_absolute_error: 1.1835
Epoch 23/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9085 - mean_absolute_error: 1.1811
Epoch 24/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9692 - mean_absolute_error: 1.2044
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0110 - mean_absolute_error: 1.2188
Epoch 26/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0665 - mean_absolute_error: 1.2367
Epoch 27/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0456 - mean_absolute_error: 1.2294
Epoch 28/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9893 - mean_absolute_error: 1.2102
Epoch 29/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0994 - mean_absolute_error: 1.2425
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0026 - mean_absolute_error: 1.2211
Epoch 31/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9509 - mean_absolute_error: 1.2071
Epoch 32/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0968 - mean_absolute_error: 1.2401
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9884 - mean_absolute_error: 1.2079
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9682 - mean_absolute_error: 1.2110
Epoch 35/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0907 - mean_absolute_error: 1.2416
Epoch 36/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9827 - mean_absolute_error: 1.2104
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0177 - mean_absolute_error: 1.2097
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9153 - mean_absolute_error: 1.1895
Epoch 39/100
34/34 [==============================] - 0s 3ms/step - loss: 2.0711 - mean_absolute_error: 1.2558
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9691 - mean_absolute_error: 1.2013A: 0s - loss: 1.9582 - mean_absolute_error: 1.196
Epoch 41/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9942 - mean_absolute_error: 1.2125
Epoch 42/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9865 - mean_absolute_error: 1.2155
Epoch 43/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9857 - mean_absolute_error: 1.2196
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9330 - mean_absolute_error: 1.2050
Epoch 45/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0150 - mean_absolute_error: 1.2164
Epoch 46/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0208 - mean_absolute_error: 1.2142
Epoch 47/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0453 - mean_absolute_error: 1.2244
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0042 - mean_absolute_error: 1.2186
Epoch 49/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0097 - mean_absolute_error: 1.2116
Epoch 50/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9975 - mean_absolute_error: 1.2108
Epoch 51/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9294 - mean_absolute_error: 1.1899
Epoch 52/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9269 - mean_absolute_error: 1.1928
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9769 - mean_absolute_error: 1.2110
Epoch 54/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9741 - mean_absolute_error: 1.2049
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0920 - mean_absolute_error: 1.2535
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8186 - mean_absolute_error: 1.1598
Epoch 57/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0465 - mean_absolute_error: 1.2335
Epoch 58/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9296 - mean_absolute_error: 1.1903
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9729 - mean_absolute_error: 1.1993
Epoch 60/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9964 - mean_absolute_error: 1.2118
Epoch 61/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0227 - mean_absolute_error: 1.2243
Epoch 62/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0099 - mean_absolute_error: 1.2199
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9910 - mean_absolute_error: 1.2112
Epoch 64/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9656 - mean_absolute_error: 1.2034
Epoch 65/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9410 - mean_absolute_error: 1.1944
Epoch 66/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1060 - mean_absolute_error: 1.2470
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9339 - mean_absolute_error: 1.1912
Epoch 68/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0718 - mean_absolute_error: 1.2338
Epoch 69/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9952 - mean_absolute_error: 1.2100
Epoch 70/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0271 - mean_absolute_error: 1.2230
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0046 - mean_absolute_error: 1.2161
Epoch 72/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0061 - mean_absolute_error: 1.2130
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9692 - mean_absolute_error: 1.2019
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0149 - mean_absolute_error: 1.2237
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9983 - mean_absolute_error: 1.2089
Epoch 76/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0213 - mean_absolute_error: 1.2250
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9693 - mean_absolute_error: 1.2087
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0066 - mean_absolute_error: 1.2107
Epoch 79/100
34/34 [==============================] - 0s 3ms/step - loss: 2.0183 - mean_absolute_error: 1.2173
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9777 - mean_absolute_error: 1.2064
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9772 - mean_absolute_error: 1.2098
Epoch 82/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0016 - mean_absolute_error: 1.2089
Epoch 83/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0567 - mean_absolute_error: 1.2301
Epoch 84/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9283 - mean_absolute_error: 1.1865
Epoch 85/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9815 - mean_absolute_error: 1.2064
Epoch 86/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9240 - mean_absolute_error: 1.1856
Epoch 87/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9100 - mean_absolute_error: 1.1782
Epoch 88/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0932 - mean_absolute_error: 1.2539
Epoch 89/100
34/34 [==============================] - 0s 1ms/step - loss: 1.8957 - mean_absolute_error: 1.1759
Epoch 90/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0215 - mean_absolute_error: 1.2195
Epoch 91/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0640 - mean_absolute_error: 1.2378
Epoch 92/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9492 - mean_absolute_error: 1.2045
Epoch 93/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1095 - mean_absolute_error: 1.2534
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0320 - mean_absolute_error: 1.2322
Epoch 95/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0704 - mean_absolute_error: 1.2360
Epoch 96/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0294 - mean_absolute_error: 1.2266
Epoch 97/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9766 - mean_absolute_error: 1.2143
Epoch 98/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0399 - mean_absolute_error: 1.2249
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0295 - mean_absolute_error: 1.2241
Epoch 100/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0213 - mean_absolute_error: 1.2190
In [81]:
reg_model.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 2.0075 - mean_absolute_error: 1.2159
Out[81]:
[2.0075135231018066, 1.2158657312393188]

1. Adding Two Hidden Layers to Model

In [82]:
# Initialize Sequential model
model = Sequential()

# Input Layer
model.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Adding two Hidden layers
model.add(Dense(6, activation='tanh', kernel_initializer = 'normal'))    # 2nd layer
model.add(Dense(5, activation='tanh', kernel_initializer = 'normal'))    # 3rd layer

#Output layer
model.add(Dense(1, activation='softmax', kernel_initializer = 'normal'))

sgd1 = optimizers.SGD(lr = 0.01)
model.compile(optimizer = sgd1, loss = 'mean_squared_error', metrics = ['mean_absolute_error'])
In [83]:
model.summary()
Model: "sequential_1"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_2 (Dense)              (None, 9)                 135       
_________________________________________________________________
dense_3 (Dense)              (None, 6)                 60        
_________________________________________________________________
dense_4 (Dense)              (None, 5)                 35        
_________________________________________________________________
dense_5 (Dense)              (None, 1)                 6         
=================================================================
Total params: 236
Trainable params: 236
Non-trainable params: 0
_________________________________________________________________
In [84]:
model.fit(X_train_sd, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 2.0361 - mean_absolute_error: 1.2281
Epoch 2/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9039 - mean_absolute_error: 1.1763
Epoch 3/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1157 - mean_absolute_error: 1.2590
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0494 - mean_absolute_error: 1.2318
Epoch 5/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9727 - mean_absolute_error: 1.2045
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0017 - mean_absolute_error: 1.2266
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9588 - mean_absolute_error: 1.2019
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0564 - mean_absolute_error: 1.2293
Epoch 9/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1082 - mean_absolute_error: 1.2482
Epoch 10/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0267 - mean_absolute_error: 1.2212
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9449 - mean_absolute_error: 1.1957
Epoch 12/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0373 - mean_absolute_error: 1.2211
Epoch 13/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9794 - mean_absolute_error: 1.2013
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1053 - mean_absolute_error: 1.2483
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9964 - mean_absolute_error: 1.2078
Epoch 16/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0492 - mean_absolute_error: 1.2311
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0250 - mean_absolute_error: 1.2164
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8861 - mean_absolute_error: 1.1735
Epoch 19/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0062 - mean_absolute_error: 1.2186
Epoch 20/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0571 - mean_absolute_error: 1.2305
Epoch 21/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9985 - mean_absolute_error: 1.2134
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9309 - mean_absolute_error: 1.1881
Epoch 23/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9617 - mean_absolute_error: 1.2058
Epoch 24/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9667 - mean_absolute_error: 1.2078
Epoch 25/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0065 - mean_absolute_error: 1.2180
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1566 - mean_absolute_error: 1.2570
Epoch 27/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0137 - mean_absolute_error: 1.2146
Epoch 28/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0062 - mean_absolute_error: 1.2129
Epoch 29/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0039 - mean_absolute_error: 1.2200
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0829 - mean_absolute_error: 1.2388
Epoch 31/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0521 - mean_absolute_error: 1.2296
Epoch 32/100
34/34 [==============================] - 0s 1ms/step - loss: 1.8604 - mean_absolute_error: 1.1674
Epoch 33/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0051 - mean_absolute_error: 1.2135
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0220 - mean_absolute_error: 1.2216
Epoch 35/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9544 - mean_absolute_error: 1.1950
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9876 - mean_absolute_error: 1.2121
Epoch 37/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0053 - mean_absolute_error: 1.2209
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9415 - mean_absolute_error: 1.1983
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0052 - mean_absolute_error: 1.2062
Epoch 40/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0930 - mean_absolute_error: 1.2538
Epoch 41/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0420 - mean_absolute_error: 1.2322
Epoch 42/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0618 - mean_absolute_error: 1.2316
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0073 - mean_absolute_error: 1.2199
Epoch 44/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9600 - mean_absolute_error: 1.2042
Epoch 45/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9532 - mean_absolute_error: 1.1902
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0335 - mean_absolute_error: 1.2272
Epoch 47/100
34/34 [==============================] - 0s 1ms/step - loss: 1.8865 - mean_absolute_error: 1.1717
Epoch 48/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0248 - mean_absolute_error: 1.2158
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9772 - mean_absolute_error: 1.2058
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0065 - mean_absolute_error: 1.2185
Epoch 51/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9809 - mean_absolute_error: 1.2118
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0426 - mean_absolute_error: 1.2305
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8987 - mean_absolute_error: 1.1784
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0107 - mean_absolute_error: 1.2193
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9812 - mean_absolute_error: 1.2116
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9958 - mean_absolute_error: 1.2089
Epoch 57/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9438 - mean_absolute_error: 1.1986
Epoch 58/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0341 - mean_absolute_error: 1.2251
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0723 - mean_absolute_error: 1.2339
Epoch 60/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0054 - mean_absolute_error: 1.2199
Epoch 61/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9186 - mean_absolute_error: 1.1910
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9480 - mean_absolute_error: 1.2037
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0213 - mean_absolute_error: 1.2222
Epoch 64/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0285 - mean_absolute_error: 1.2227
Epoch 65/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0049 - mean_absolute_error: 1.2183
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9349 - mean_absolute_error: 1.1851
Epoch 67/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0027 - mean_absolute_error: 1.2183
Epoch 68/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9387 - mean_absolute_error: 1.1870
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9027 - mean_absolute_error: 1.1784
Epoch 70/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1221 - mean_absolute_error: 1.2547
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0759 - mean_absolute_error: 1.2531
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0668 - mean_absolute_error: 1.2346
Epoch 73/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9323 - mean_absolute_error: 1.1930
Epoch 74/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0870 - mean_absolute_error: 1.2334
Epoch 75/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0914 - mean_absolute_error: 1.2473
Epoch 76/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9260 - mean_absolute_error: 1.1873
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0044 - mean_absolute_error: 1.2158
Epoch 78/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0928 - mean_absolute_error: 1.2424
Epoch 79/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9561 - mean_absolute_error: 1.1990
Epoch 80/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0284 - mean_absolute_error: 1.2187
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0705 - mean_absolute_error: 1.2398
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0911 - mean_absolute_error: 1.2396
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0216 - mean_absolute_error: 1.2183
Epoch 84/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0597 - mean_absolute_error: 1.2402
Epoch 85/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1156 - mean_absolute_error: 1.2554
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9292 - mean_absolute_error: 1.1958
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0156 - mean_absolute_error: 1.2177
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1691 - mean_absolute_error: 1.2594
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9290 - mean_absolute_error: 1.1929
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9393 - mean_absolute_error: 1.1960
Epoch 91/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0783 - mean_absolute_error: 1.2371
Epoch 92/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0117 - mean_absolute_error: 1.2145
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0297 - mean_absolute_error: 1.2259
Epoch 94/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9813 - mean_absolute_error: 1.2023
Epoch 95/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9944 - mean_absolute_error: 1.2144
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0119 - mean_absolute_error: 1.2148
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9421 - mean_absolute_error: 1.1901
Epoch 98/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9618 - mean_absolute_error: 1.1880
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0255 - mean_absolute_error: 1.2221
Epoch 100/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0106 - mean_absolute_error: 1.2163
Out[84]:
<tensorflow.python.keras.callbacks.History at 0x1a54f4ea50>
In [85]:
model.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 2ms/step - loss: 2.0075 - mean_absolute_error: 1.2159
Out[85]:
[2.0075135231018066, 1.2158657312393188]

2. Adding Four Hidden Layers to Model

In [86]:
# Initialize Sequential model
model1 = Sequential()

# Input Layer
model1.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Adding four Hidden layers
model1.add(Dense(10, activation='sigmoid', kernel_initializer = 'normal'))    # 2nd layer
model1.add(Dense(12, activation='sigmoid', kernel_initializer = 'normal'))    # 3rd layer

model1.add(Dense(8, activation='sigmoid', kernel_initializer = 'normal'))     # 4th layer 
model1.add(Dense(6, activation='sigmoid', kernel_initializer = 'normal'))     # 5th layer


#Output layer
model1.add(Dense(1, activation='softmax', kernel_initializer = 'normal'))

sgd2 = optimizers.SGD(lr = 0.01)
model1.compile(optimizer = sgd2, loss = 'mean_squared_error', metrics = ['mean_absolute_error'])
In [87]:
model1.summary()
Model: "sequential_2"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_6 (Dense)              (None, 9)                 135       
_________________________________________________________________
dense_7 (Dense)              (None, 10)                100       
_________________________________________________________________
dense_8 (Dense)              (None, 12)                132       
_________________________________________________________________
dense_9 (Dense)              (None, 8)                 104       
_________________________________________________________________
dense_10 (Dense)             (None, 6)                 54        
_________________________________________________________________
dense_11 (Dense)             (None, 1)                 7         
=================================================================
Total params: 532
Trainable params: 532
Non-trainable params: 0
_________________________________________________________________
In [88]:
model1.fit(X_train_sd, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 2.0125 - mean_absolute_error: 1.2105
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9454 - mean_absolute_error: 1.2002
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9555 - mean_absolute_error: 1.2024
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9688 - mean_absolute_error: 1.2001
Epoch 5/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1153 - mean_absolute_error: 1.2522
Epoch 6/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0267 - mean_absolute_error: 1.2221
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9144 - mean_absolute_error: 1.1810
Epoch 8/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9418 - mean_absolute_error: 1.1939
Epoch 9/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0263 - mean_absolute_error: 1.2295
Epoch 10/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0133 - mean_absolute_error: 1.2181
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1143 - mean_absolute_error: 1.2508
Epoch 12/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9602 - mean_absolute_error: 1.1914
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0465 - mean_absolute_error: 1.2315
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9440 - mean_absolute_error: 1.2037
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9036 - mean_absolute_error: 1.1864
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9804 - mean_absolute_error: 1.2030
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0321 - mean_absolute_error: 1.2347
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0405 - mean_absolute_error: 1.2309
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0723 - mean_absolute_error: 1.2318
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9736 - mean_absolute_error: 1.1976
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0419 - mean_absolute_error: 1.2231
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9589 - mean_absolute_error: 1.2027
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9493 - mean_absolute_error: 1.1906
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9443 - mean_absolute_error: 1.1934
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0866 - mean_absolute_error: 1.2362
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0674 - mean_absolute_error: 1.2362
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0360 - mean_absolute_error: 1.2281
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0516 - mean_absolute_error: 1.2325
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9661 - mean_absolute_error: 1.2103
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0337 - mean_absolute_error: 1.2267
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9846 - mean_absolute_error: 1.2200
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9302 - mean_absolute_error: 1.1986
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 2.2441 - mean_absolute_error: 1.2948
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9426 - mean_absolute_error: 1.1917
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1362 - mean_absolute_error: 1.2675
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9815 - mean_absolute_error: 1.2062
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9454 - mean_absolute_error: 1.1941
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0163 - mean_absolute_error: 1.2233
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0161 - mean_absolute_error: 1.2206
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9750 - mean_absolute_error: 1.2001
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0243 - mean_absolute_error: 1.2216
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9317 - mean_absolute_error: 1.1884
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0173 - mean_absolute_error: 1.2195
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9095 - mean_absolute_error: 1.1817
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9453 - mean_absolute_error: 1.1905
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9768 - mean_absolute_error: 1.2098
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0634 - mean_absolute_error: 1.2410
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0043 - mean_absolute_error: 1.2206
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0137 - mean_absolute_error: 1.2182
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1600 - mean_absolute_error: 1.2724
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0233 - mean_absolute_error: 1.2291
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0313 - mean_absolute_error: 1.2103
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9120 - mean_absolute_error: 1.1835
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8386 - mean_absolute_error: 1.1581
Epoch 55/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9956 - mean_absolute_error: 1.2116
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9382 - mean_absolute_error: 1.1953
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0758 - mean_absolute_error: 1.2417
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9821 - mean_absolute_error: 1.2086
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0580 - mean_absolute_error: 1.2277
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0664 - mean_absolute_error: 1.2351
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9643 - mean_absolute_error: 1.1959
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9492 - mean_absolute_error: 1.2007
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0699 - mean_absolute_error: 1.2299
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0905 - mean_absolute_error: 1.2399
Epoch 65/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9913 - mean_absolute_error: 1.2139
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0794 - mean_absolute_error: 1.2303
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0420 - mean_absolute_error: 1.2259
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9924 - mean_absolute_error: 1.2081
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0436 - mean_absolute_error: 1.2290
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9951 - mean_absolute_error: 1.2134
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0285 - mean_absolute_error: 1.2218
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9300 - mean_absolute_error: 1.1795
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0676 - mean_absolute_error: 1.2357
Epoch 74/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0287 - mean_absolute_error: 1.2169
Epoch 75/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0581 - mean_absolute_error: 1.2372
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8810 - mean_absolute_error: 1.1786
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9413 - mean_absolute_error: 1.1943
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1200 - mean_absolute_error: 1.2506
Epoch 79/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0611 - mean_absolute_error: 1.2284
Epoch 80/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0074 - mean_absolute_error: 1.2185
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0381 - mean_absolute_error: 1.2133
Epoch 82/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0185 - mean_absolute_error: 1.2170
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9962 - mean_absolute_error: 1.2132
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0116 - mean_absolute_error: 1.2231
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0259 - mean_absolute_error: 1.2185
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0634 - mean_absolute_error: 1.2301
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0393 - mean_absolute_error: 1.2152
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0110 - mean_absolute_error: 1.2170
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0387 - mean_absolute_error: 1.2342
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0147 - mean_absolute_error: 1.2199
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0743 - mean_absolute_error: 1.2396
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0140 - mean_absolute_error: 1.2191
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0426 - mean_absolute_error: 1.2226
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0679 - mean_absolute_error: 1.2320
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9195 - mean_absolute_error: 1.1808
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9674 - mean_absolute_error: 1.1952
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9847 - mean_absolute_error: 1.2091
Epoch 98/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9429 - mean_absolute_error: 1.1950
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9439 - mean_absolute_error: 1.1951
Epoch 100/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0510 - mean_absolute_error: 1.2341
Out[88]:
<tensorflow.python.keras.callbacks.History at 0x1a5579c210>
In [89]:
model1.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 2.0075 - mean_absolute_error: 1.2159
Out[89]:
[2.0075135231018066, 1.2158657312393188]

B. --------------------Adam Optimizer---------------------

In [90]:
# Adam Neural Network regression model before pca

reg_model1 = Sequential()

# Input Layer

reg_model1.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Output Layer

reg_model1.add(Dense(1, kernel_initializer = 'normal', activation = 'softmax'))

adam = optimizers.Adam(lr = 0.01)
reg_model1.compile(optimizer = adam, loss = 'mean_squared_error', metrics = ['mean_absolute_error'])
In [91]:
reg_model1.summary()
Model: "sequential_3"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_12 (Dense)             (None, 9)                 135       
_________________________________________________________________
dense_13 (Dense)             (None, 1)                 10        
=================================================================
Total params: 145
Trainable params: 145
Non-trainable params: 0
_________________________________________________________________
In [92]:
history1 = reg_model1.fit(X_train_sd, y_train, epochs = 100, verbose = 1)
Epoch 1/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0695 - mean_absolute_error: 1.2373
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1121 - mean_absolute_error: 1.2485
Epoch 3/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9787 - mean_absolute_error: 1.2127
Epoch 4/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0089 - mean_absolute_error: 1.2184
Epoch 5/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0772 - mean_absolute_error: 1.2302
Epoch 6/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0407 - mean_absolute_error: 1.2275
Epoch 7/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0197 - mean_absolute_error: 1.2203
Epoch 8/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1094 - mean_absolute_error: 1.2561
Epoch 9/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9826 - mean_absolute_error: 1.2005
Epoch 10/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9828 - mean_absolute_error: 1.2080
Epoch 11/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0261 - mean_absolute_error: 1.2253
Epoch 12/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1171 - mean_absolute_error: 1.2560
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9987 - mean_absolute_error: 1.2093
Epoch 14/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0302 - mean_absolute_error: 1.2327
Epoch 15/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0277 - mean_absolute_error: 1.2327
Epoch 16/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9859 - mean_absolute_error: 1.2112
Epoch 17/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1045 - mean_absolute_error: 1.2384
Epoch 18/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0581 - mean_absolute_error: 1.2304
Epoch 19/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0088 - mean_absolute_error: 1.2298
Epoch 20/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0128 - mean_absolute_error: 1.2159
Epoch 21/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0217 - mean_absolute_error: 1.2214
Epoch 22/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9957 - mean_absolute_error: 1.2069
Epoch 23/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9859 - mean_absolute_error: 1.2021
Epoch 24/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0946 - mean_absolute_error: 1.2516
Epoch 25/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9675 - mean_absolute_error: 1.1929
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9899 - mean_absolute_error: 1.2013
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0243 - mean_absolute_error: 1.2176
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9864 - mean_absolute_error: 1.2071
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9725 - mean_absolute_error: 1.1992
Epoch 30/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0452 - mean_absolute_error: 1.2282
Epoch 31/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1556 - mean_absolute_error: 1.2724
Epoch 32/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9951 - mean_absolute_error: 1.2159
Epoch 33/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9641 - mean_absolute_error: 1.2029
Epoch 34/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0471 - mean_absolute_error: 1.2274
Epoch 35/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9899 - mean_absolute_error: 1.2144
Epoch 36/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9891 - mean_absolute_error: 1.2080
Epoch 37/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9930 - mean_absolute_error: 1.2103
Epoch 38/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0383 - mean_absolute_error: 1.2252
Epoch 39/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9198 - mean_absolute_error: 1.1899
Epoch 40/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0487 - mean_absolute_error: 1.2345
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9414 - mean_absolute_error: 1.2032
Epoch 42/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0690 - mean_absolute_error: 1.2349
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9853 - mean_absolute_error: 1.2096
Epoch 44/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0566 - mean_absolute_error: 1.2357
Epoch 45/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9258 - mean_absolute_error: 1.1879
Epoch 46/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1066 - mean_absolute_error: 1.2537
Epoch 47/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9341 - mean_absolute_error: 1.1950
Epoch 48/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0422 - mean_absolute_error: 1.2332
Epoch 49/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0400 - mean_absolute_error: 1.2317
Epoch 50/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0445 - mean_absolute_error: 1.2228
Epoch 51/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0826 - mean_absolute_error: 1.2459
Epoch 52/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0029 - mean_absolute_error: 1.2216
Epoch 53/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9754 - mean_absolute_error: 1.2031
Epoch 54/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0121 - mean_absolute_error: 1.2181
Epoch 55/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9646 - mean_absolute_error: 1.1982
Epoch 56/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0075 - mean_absolute_error: 1.2159
Epoch 57/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0515 - mean_absolute_error: 1.2341
Epoch 58/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0575 - mean_absolute_error: 1.2278
Epoch 59/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0607 - mean_absolute_error: 1.2288
Epoch 60/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0505 - mean_absolute_error: 1.2373
Epoch 61/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1451 - mean_absolute_error: 1.2664
Epoch 62/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0018 - mean_absolute_error: 1.2039
Epoch 63/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0192 - mean_absolute_error: 1.2083
Epoch 64/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9903 - mean_absolute_error: 1.2087
Epoch 65/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9671 - mean_absolute_error: 1.2066
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0114 - mean_absolute_error: 1.2267
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0289 - mean_absolute_error: 1.2232
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9840 - mean_absolute_error: 1.2062
Epoch 69/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9846 - mean_absolute_error: 1.2040
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9932 - mean_absolute_error: 1.2064
Epoch 71/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9565 - mean_absolute_error: 1.2056
Epoch 72/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9809 - mean_absolute_error: 1.2099
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0010 - mean_absolute_error: 1.2146
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1020 - mean_absolute_error: 1.2439
Epoch 75/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0110 - mean_absolute_error: 1.2208
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0645 - mean_absolute_error: 1.2334
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9485 - mean_absolute_error: 1.1995
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0156 - mean_absolute_error: 1.2270
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0070 - mean_absolute_error: 1.2207
Epoch 80/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9366 - mean_absolute_error: 1.1976
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9786 - mean_absolute_error: 1.2017
Epoch 82/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0163 - mean_absolute_error: 1.2193
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0383 - mean_absolute_error: 1.2230
Epoch 84/100
34/34 [==============================] - 0s 1ms/step - loss: 1.8856 - mean_absolute_error: 1.1744
Epoch 85/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0153 - mean_absolute_error: 1.2178
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9449 - mean_absolute_error: 1.1853
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0998 - mean_absolute_error: 1.2508
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0181 - mean_absolute_error: 1.2183
Epoch 89/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9554 - mean_absolute_error: 1.1983
Epoch 90/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9903 - mean_absolute_error: 1.2160
Epoch 91/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0616 - mean_absolute_error: 1.2326
Epoch 92/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9493 - mean_absolute_error: 1.2004
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0335 - mean_absolute_error: 1.2228
Epoch 94/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0278 - mean_absolute_error: 1.2187
Epoch 95/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0328 - mean_absolute_error: 1.2215
Epoch 96/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0691 - mean_absolute_error: 1.2330
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0147 - mean_absolute_error: 1.2147
Epoch 98/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0497 - mean_absolute_error: 1.2272
Epoch 99/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9905 - mean_absolute_error: 1.2142
Epoch 100/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9732 - mean_absolute_error: 1.2118
In [93]:
reg_model1.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 2.0075 - mean_absolute_error: 1.2159
Out[93]:
[2.0075135231018066, 1.2158657312393188]

1. Adding Two Hidden Layers to Model

In [94]:
# Initialize Sequential model
model2 = Sequential()

# Input Layer
model2.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Adding two Hidden layers
model2.add(Dense(10, activation='elu', kernel_initializer = 'normal'))    # 2nd layer
model2.add(Dense(4, activation='elu', kernel_initializer = 'normal'))    # 3rd layer

#Output layer
model2.add(Dense(1, activation='softmax', kernel_initializer = 'normal'))

adam1 = optimizers.Adam(lr = 0.01)
model2.compile(optimizer = adam1, loss = 'mean_squared_error', metrics = ['mean_absolute_error'])
In [95]:
model2.summary()
Model: "sequential_4"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_14 (Dense)             (None, 9)                 135       
_________________________________________________________________
dense_15 (Dense)             (None, 10)                100       
_________________________________________________________________
dense_16 (Dense)             (None, 4)                 44        
_________________________________________________________________
dense_17 (Dense)             (None, 1)                 5         
=================================================================
Total params: 284
Trainable params: 284
Non-trainable params: 0
_________________________________________________________________
In [96]:
model2.fit(X_train_sd, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 2.1038 - mean_absolute_error: 1.2460
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9014 - mean_absolute_error: 1.1856
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0135 - mean_absolute_error: 1.2091
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9895 - mean_absolute_error: 1.2044
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9787 - mean_absolute_error: 1.2017
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9957 - mean_absolute_error: 1.2112
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9159 - mean_absolute_error: 1.1832
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0395 - mean_absolute_error: 1.2249
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0669 - mean_absolute_error: 1.2362
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0532 - mean_absolute_error: 1.2266
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0491 - mean_absolute_error: 1.2219
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9583 - mean_absolute_error: 1.1896
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9326 - mean_absolute_error: 1.1880
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0236 - mean_absolute_error: 1.2187
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9980 - mean_absolute_error: 1.2181
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0155 - mean_absolute_error: 1.2206
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9784 - mean_absolute_error: 1.2047
Epoch 18/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0233 - mean_absolute_error: 1.2274
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0624 - mean_absolute_error: 1.2356
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0716 - mean_absolute_error: 1.2352
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9763 - mean_absolute_error: 1.2105
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0305 - mean_absolute_error: 1.2263
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0631 - mean_absolute_error: 1.2255
Epoch 24/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9538 - mean_absolute_error: 1.2054
Epoch 25/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9759 - mean_absolute_error: 1.1967
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0091 - mean_absolute_error: 1.2202
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9287 - mean_absolute_error: 1.1943
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0615 - mean_absolute_error: 1.2335
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9387 - mean_absolute_error: 1.1836
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0032 - mean_absolute_error: 1.2068
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0491 - mean_absolute_error: 1.2201
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9731 - mean_absolute_error: 1.2015
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0760 - mean_absolute_error: 1.2389
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9694 - mean_absolute_error: 1.2058
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0655 - mean_absolute_error: 1.2357
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0299 - mean_absolute_error: 1.2204
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0384 - mean_absolute_error: 1.2227
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0370 - mean_absolute_error: 1.2301
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1032 - mean_absolute_error: 1.2473
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0630 - mean_absolute_error: 1.2439
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9684 - mean_absolute_error: 1.2077
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0668 - mean_absolute_error: 1.2460
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9919 - mean_absolute_error: 1.2090
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9890 - mean_absolute_error: 1.2107
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8995 - mean_absolute_error: 1.1801
Epoch 46/100
34/34 [==============================] - 0s 3ms/step - loss: 1.9962 - mean_absolute_error: 1.2108
Epoch 47/100
34/34 [==============================] - 0s 3ms/step - loss: 2.0744 - mean_absolute_error: 1.2432
Epoch 48/100
34/34 [==============================] - 0s 3ms/step - loss: 1.9353 - mean_absolute_error: 1.1913
Epoch 49/100
34/34 [==============================] - 0s 3ms/step - loss: 1.9633 - mean_absolute_error: 1.2044
Epoch 50/100
34/34 [==============================] - 0s 3ms/step - loss: 1.9957 - mean_absolute_error: 1.2089
Epoch 51/100
34/34 [==============================] - 0s 3ms/step - loss: 2.1324 - mean_absolute_error: 1.2572
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9807 - mean_absolute_error: 1.2066
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9554 - mean_absolute_error: 1.1992
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9603 - mean_absolute_error: 1.2024
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0118 - mean_absolute_error: 1.2127
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0757 - mean_absolute_error: 1.2343
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0665 - mean_absolute_error: 1.2320
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8935 - mean_absolute_error: 1.1801
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0351 - mean_absolute_error: 1.2242
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9013 - mean_absolute_error: 1.1799
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0720 - mean_absolute_error: 1.2405
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0124 - mean_absolute_error: 1.2178
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0959 - mean_absolute_error: 1.2408
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9716 - mean_absolute_error: 1.2065
Epoch 65/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0479 - mean_absolute_error: 1.2317
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9161 - mean_absolute_error: 1.1911
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0257 - mean_absolute_error: 1.2231
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9387 - mean_absolute_error: 1.1954
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9948 - mean_absolute_error: 1.2157
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8624 - mean_absolute_error: 1.1676
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9856 - mean_absolute_error: 1.2044
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9934 - mean_absolute_error: 1.2122
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0171 - mean_absolute_error: 1.2088
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9746 - mean_absolute_error: 1.2025
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1476 - mean_absolute_error: 1.2630
Epoch 76/100
34/34 [==============================] - 0s 3ms/step - loss: 1.9467 - mean_absolute_error: 1.1960
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9394 - mean_absolute_error: 1.2032
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9860 - mean_absolute_error: 1.2174
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9973 - mean_absolute_error: 1.2152
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1621 - mean_absolute_error: 1.2686
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9937 - mean_absolute_error: 1.2118
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9633 - mean_absolute_error: 1.2034
Epoch 83/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0949 - mean_absolute_error: 1.2476
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1020 - mean_absolute_error: 1.2377
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9639 - mean_absolute_error: 1.1958
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0374 - mean_absolute_error: 1.2199
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9909 - mean_absolute_error: 1.2046
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9890 - mean_absolute_error: 1.2066
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1244 - mean_absolute_error: 1.2518
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9157 - mean_absolute_error: 1.1858
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0632 - mean_absolute_error: 1.2382
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8978 - mean_absolute_error: 1.1759
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9965 - mean_absolute_error: 1.2224
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0341 - mean_absolute_error: 1.2181
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0858 - mean_absolute_error: 1.2502
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9992 - mean_absolute_error: 1.2042
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8928 - mean_absolute_error: 1.1853
Epoch 98/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0470 - mean_absolute_error: 1.2319
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0278 - mean_absolute_error: 1.2157
Epoch 100/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0236 - mean_absolute_error: 1.2252
Out[96]:
<tensorflow.python.keras.callbacks.History at 0x1a55a8f1d0>
In [97]:
model2.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 2ms/step - loss: 2.0075 - mean_absolute_error: 1.2159
Out[97]:
[2.0075135231018066, 1.2158657312393188]

2. Adding Four Hidden Layers to Model

In [98]:
# Initialize Sequential model
model3 = Sequential()

# Input Layer
model3.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Adding four Hidden layers
model3.add(Dense(10, activation='tanh', kernel_initializer = 'normal'))    # 2nd layer
model3.add(Dense(4, activation='tanh', kernel_initializer = 'normal'))     # 3rd layer

model3.add(Dense(5, activation='tanh', kernel_initializer = 'normal'))     # 4th layer
model3.add(Dense(12, activation='tanh', kernel_initializer = 'normal'))     # 5th layer


#Output layer
model3.add(Dense(1, activation='softmax', kernel_initializer = 'normal'))

adam2 = optimizers.Adam(lr = 0.01)
model3.compile(optimizer = adam2, loss = 'mean_squared_error', metrics = ['mean_absolute_error'])
In [99]:
model3.summary()
Model: "sequential_5"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_18 (Dense)             (None, 9)                 135       
_________________________________________________________________
dense_19 (Dense)             (None, 10)                100       
_________________________________________________________________
dense_20 (Dense)             (None, 4)                 44        
_________________________________________________________________
dense_21 (Dense)             (None, 5)                 25        
_________________________________________________________________
dense_22 (Dense)             (None, 12)                72        
_________________________________________________________________
dense_23 (Dense)             (None, 1)                 13        
=================================================================
Total params: 389
Trainable params: 389
Non-trainable params: 0
_________________________________________________________________
In [100]:
model3.fit(X_train_sd, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 2.0482 - mean_absolute_error: 1.2335
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0196 - mean_absolute_error: 1.2215
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9728 - mean_absolute_error: 1.1969
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0711 - mean_absolute_error: 1.2377
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0009 - mean_absolute_error: 1.2104
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9876 - mean_absolute_error: 1.2089
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1243 - mean_absolute_error: 1.2539
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9741 - mean_absolute_error: 1.2053
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0464 - mean_absolute_error: 1.2346
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0167 - mean_absolute_error: 1.2139
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1159 - mean_absolute_error: 1.2634
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0920 - mean_absolute_error: 1.2485
Epoch 13/100
34/34 [==============================] - 0s 4ms/step - loss: 2.0734 - mean_absolute_error: 1.2338
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0195 - mean_absolute_error: 1.2170
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9269 - mean_absolute_error: 1.1956
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9982 - mean_absolute_error: 1.2162
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0134 - mean_absolute_error: 1.2156
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0694 - mean_absolute_error: 1.2388
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9482 - mean_absolute_error: 1.1965
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9587 - mean_absolute_error: 1.1981
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9590 - mean_absolute_error: 1.1983
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8288 - mean_absolute_error: 1.1647
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0285 - mean_absolute_error: 1.2247
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0657 - mean_absolute_error: 1.2379
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0589 - mean_absolute_error: 1.2380
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1354 - mean_absolute_error: 1.2536
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8079 - mean_absolute_error: 1.1546
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1149 - mean_absolute_error: 1.2546
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0235 - mean_absolute_error: 1.2217
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0321 - mean_absolute_error: 1.2246
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0838 - mean_absolute_error: 1.2408
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9948 - mean_absolute_error: 1.2108
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0164 - mean_absolute_error: 1.2157
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8945 - mean_absolute_error: 1.1832
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9546 - mean_absolute_error: 1.1995
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9753 - mean_absolute_error: 1.2083
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9936 - mean_absolute_error: 1.2118
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9963 - mean_absolute_error: 1.2157
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9180 - mean_absolute_error: 1.1898
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0179 - mean_absolute_error: 1.2198
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0874 - mean_absolute_error: 1.2400
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9613 - mean_absolute_error: 1.1986
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0405 - mean_absolute_error: 1.2256
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0049 - mean_absolute_error: 1.2126
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0414 - mean_absolute_error: 1.2186
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0778 - mean_absolute_error: 1.2462
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9967 - mean_absolute_error: 1.2213
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0267 - mean_absolute_error: 1.2178
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9798 - mean_absolute_error: 1.2137
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0816 - mean_absolute_error: 1.2345
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8848 - mean_absolute_error: 1.1778
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9421 - mean_absolute_error: 1.1978
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0097 - mean_absolute_error: 1.2138
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9512 - mean_absolute_error: 1.1945
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9784 - mean_absolute_error: 1.2070
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0392 - mean_absolute_error: 1.2233
Epoch 57/100
34/34 [==============================] - 0s 3ms/step - loss: 1.9729 - mean_absolute_error: 1.2121
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9843 - mean_absolute_error: 1.2068
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9388 - mean_absolute_error: 1.1956
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9879 - mean_absolute_error: 1.2151
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0333 - mean_absolute_error: 1.2200
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0193 - mean_absolute_error: 1.2270
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9629 - mean_absolute_error: 1.1970
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9786 - mean_absolute_error: 1.1983
Epoch 65/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1142 - mean_absolute_error: 1.2474
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9761 - mean_absolute_error: 1.1939
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9683 - mean_absolute_error: 1.2042
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9892 - mean_absolute_error: 1.2066
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0088 - mean_absolute_error: 1.2153
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0025 - mean_absolute_error: 1.2137
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0069 - mean_absolute_error: 1.2148
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9889 - mean_absolute_error: 1.2144
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0486 - mean_absolute_error: 1.2355
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0607 - mean_absolute_error: 1.2402
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0223 - mean_absolute_error: 1.2223
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0374 - mean_absolute_error: 1.2314
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9431 - mean_absolute_error: 1.1912
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1208 - mean_absolute_error: 1.2546
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0659 - mean_absolute_error: 1.2373
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9797 - mean_absolute_error: 1.1995
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9697 - mean_absolute_error: 1.1980
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9493 - mean_absolute_error: 1.2017
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9408 - mean_absolute_error: 1.1958
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0960 - mean_absolute_error: 1.2420
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0452 - mean_absolute_error: 1.2311
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0210 - mean_absolute_error: 1.2219
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0239 - mean_absolute_error: 1.2226
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0869 - mean_absolute_error: 1.2401
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0566 - mean_absolute_error: 1.2344
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0229 - mean_absolute_error: 1.2225
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0408 - mean_absolute_error: 1.2273
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0456 - mean_absolute_error: 1.2262
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9808 - mean_absolute_error: 1.2026
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0464 - mean_absolute_error: 1.2305
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0488 - mean_absolute_error: 1.2292
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0061 - mean_absolute_error: 1.2104
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9758 - mean_absolute_error: 1.1995
Epoch 98/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0124 - mean_absolute_error: 1.2176
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0741 - mean_absolute_error: 1.2393
Epoch 100/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0456 - mean_absolute_error: 1.2218
Out[100]:
<tensorflow.python.keras.callbacks.History at 0x1a55be8290>
In [101]:
model3.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 2.0075 - mean_absolute_error: 1.2159
Out[101]:
[2.0075135231018066, 1.2158657312393188]

C. -----------------RMSProp Optimizer-----------------

In [102]:
# Rmsprop Neural Network regression model before pca

reg_model2 = Sequential()

# Input Layer

reg_model2.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Output Layer

reg_model2.add(Dense(1, kernel_initializer = 'normal', activation = 'softmax'))

rms = optimizers.RMSprop(lr = 0.01)
reg_model2.compile(optimizer = rms, loss = 'mean_squared_error', metrics = ['mean_absolute_error'])
In [103]:
reg_model2.summary()
Model: "sequential_6"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_24 (Dense)             (None, 9)                 135       
_________________________________________________________________
dense_25 (Dense)             (None, 1)                 10        
=================================================================
Total params: 145
Trainable params: 145
Non-trainable params: 0
_________________________________________________________________
In [104]:
history2 = reg_model2.fit(X_train_sd, y_train, epochs = 100, verbose = 1)
Epoch 1/100
34/34 [==============================] - 1s 1ms/step - loss: 2.0001 - mean_absolute_error: 1.2169
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0854 - mean_absolute_error: 1.2433
Epoch 3/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9933 - mean_absolute_error: 1.2059
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9997 - mean_absolute_error: 1.2123
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0883 - mean_absolute_error: 1.2452
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0166 - mean_absolute_error: 1.2334
Epoch 7/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0529 - mean_absolute_error: 1.2352
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9833 - mean_absolute_error: 1.2113
Epoch 9/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0421 - mean_absolute_error: 1.2204
Epoch 10/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0493 - mean_absolute_error: 1.2345
Epoch 11/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0275 - mean_absolute_error: 1.2312
Epoch 12/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0230 - mean_absolute_error: 1.2279
Epoch 13/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0597 - mean_absolute_error: 1.2299
Epoch 14/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9170 - mean_absolute_error: 1.1955
Epoch 15/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9491 - mean_absolute_error: 1.1879
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9706 - mean_absolute_error: 1.2011
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9755 - mean_absolute_error: 1.2098
Epoch 18/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0045 - mean_absolute_error: 1.2061
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9576 - mean_absolute_error: 1.2030
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9914 - mean_absolute_error: 1.2100
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0905 - mean_absolute_error: 1.2476
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0780 - mean_absolute_error: 1.2408
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9624 - mean_absolute_error: 1.2037
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0487 - mean_absolute_error: 1.2300
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0249 - mean_absolute_error: 1.2139
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0247 - mean_absolute_error: 1.2170
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9223 - mean_absolute_error: 1.1818
Epoch 28/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0905 - mean_absolute_error: 1.2339
Epoch 29/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9867 - mean_absolute_error: 1.2040
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9446 - mean_absolute_error: 1.1878
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0234 - mean_absolute_error: 1.2115
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9874 - mean_absolute_error: 1.2113
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0289 - mean_absolute_error: 1.2175
Epoch 34/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0072 - mean_absolute_error: 1.2132
Epoch 35/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0198 - mean_absolute_error: 1.2217
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0208 - mean_absolute_error: 1.2194
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9488 - mean_absolute_error: 1.1890
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9867 - mean_absolute_error: 1.2077
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9237 - mean_absolute_error: 1.1919
Epoch 40/100
34/34 [==============================] - ETA: 0s - loss: 2.0405 - mean_absolute_error: 1.224 - 0s 2ms/step - loss: 2.0388 - mean_absolute_error: 1.2245
Epoch 41/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9283 - mean_absolute_error: 1.1912
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9878 - mean_absolute_error: 1.2088
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9230 - mean_absolute_error: 1.1862
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9003 - mean_absolute_error: 1.1760
Epoch 45/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0367 - mean_absolute_error: 1.2271
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9505 - mean_absolute_error: 1.1963
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0426 - mean_absolute_error: 1.2237
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0059 - mean_absolute_error: 1.2129
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0241 - mean_absolute_error: 1.2211
Epoch 50/100
34/34 [==============================] - 0s 3ms/step - loss: 2.0783 - mean_absolute_error: 1.2388
Epoch 51/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0915 - mean_absolute_error: 1.2392
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9556 - mean_absolute_error: 1.2007
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9858 - mean_absolute_error: 1.2084
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0034 - mean_absolute_error: 1.2067
Epoch 55/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0504 - mean_absolute_error: 1.2336
Epoch 56/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0395 - mean_absolute_error: 1.2245
Epoch 57/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0330 - mean_absolute_error: 1.2184
Epoch 58/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9998 - mean_absolute_error: 1.2134
Epoch 59/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0722 - mean_absolute_error: 1.2353
Epoch 60/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9217 - mean_absolute_error: 1.1911
Epoch 61/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0143 - mean_absolute_error: 1.2188
Epoch 62/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0884 - mean_absolute_error: 1.2405
Epoch 63/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0199 - mean_absolute_error: 1.2252
Epoch 64/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0485 - mean_absolute_error: 1.2264
Epoch 65/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0502 - mean_absolute_error: 1.2258
Epoch 66/100
34/34 [==============================] - 0s 1ms/step - loss: 1.8995 - mean_absolute_error: 1.1748
Epoch 67/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0222 - mean_absolute_error: 1.2205
Epoch 68/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0140 - mean_absolute_error: 1.2185
Epoch 69/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9988 - mean_absolute_error: 1.2114
Epoch 70/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9676 - mean_absolute_error: 1.2059
Epoch 71/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0585 - mean_absolute_error: 1.2334
Epoch 72/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9983 - mean_absolute_error: 1.2161
Epoch 73/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0152 - mean_absolute_error: 1.2271
Epoch 74/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0404 - mean_absolute_error: 1.2232
Epoch 75/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9699 - mean_absolute_error: 1.2021
Epoch 76/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0374 - mean_absolute_error: 1.2280
Epoch 77/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9563 - mean_absolute_error: 1.2052
Epoch 78/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0646 - mean_absolute_error: 1.2367
Epoch 79/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0094 - mean_absolute_error: 1.2185
Epoch 80/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9955 - mean_absolute_error: 1.2142
Epoch 81/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0093 - mean_absolute_error: 1.2088
Epoch 82/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9552 - mean_absolute_error: 1.1989
Epoch 83/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9550 - mean_absolute_error: 1.1900
Epoch 84/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9466 - mean_absolute_error: 1.1991
Epoch 85/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0368 - mean_absolute_error: 1.2293
Epoch 86/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0593 - mean_absolute_error: 1.2245
Epoch 87/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0153 - mean_absolute_error: 1.2257
Epoch 88/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0395 - mean_absolute_error: 1.2287
Epoch 89/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9812 - mean_absolute_error: 1.2065
Epoch 90/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0042 - mean_absolute_error: 1.2164
Epoch 91/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9915 - mean_absolute_error: 1.2128
Epoch 92/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0583 - mean_absolute_error: 1.2296
Epoch 93/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0331 - mean_absolute_error: 1.2275
Epoch 94/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0504 - mean_absolute_error: 1.2239
Epoch 95/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9859 - mean_absolute_error: 1.2126
Epoch 96/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9864 - mean_absolute_error: 1.2110
Epoch 97/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0004 - mean_absolute_error: 1.2126
Epoch 98/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0176 - mean_absolute_error: 1.2212
Epoch 99/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1321 - mean_absolute_error: 1.2586
Epoch 100/100
34/34 [==============================] - 0s 1ms/step - loss: 1.8563 - mean_absolute_error: 1.1596
In [105]:
reg_model2.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 2.0075 - mean_absolute_error: 1.2159
Out[105]:
[2.0075135231018066, 1.2158657312393188]

1. Adding Two Hidden Layers to Model

In [106]:
# Initialize Sequential model
model4 = Sequential()

# Input Layer
model4.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Adding two Hidden layers
model4.add(Dense(15, activation='sigmoid', kernel_initializer = 'normal'))    # 2nd layer
model4.add(Dense(10, activation='sigmoid', kernel_initializer = 'normal'))    # 3rd layer

#Output layer
model4.add(Dense(1, activation='softmax', kernel_initializer = 'normal'))

rms1 = optimizers.RMSprop(lr = 0.01)
model4.compile(optimizer = rms1, loss = 'mean_squared_error', metrics = ['mean_absolute_error'])
In [107]:
model4.summary()
Model: "sequential_7"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_26 (Dense)             (None, 9)                 135       
_________________________________________________________________
dense_27 (Dense)             (None, 15)                150       
_________________________________________________________________
dense_28 (Dense)             (None, 10)                160       
_________________________________________________________________
dense_29 (Dense)             (None, 1)                 11        
=================================================================
Total params: 456
Trainable params: 456
Non-trainable params: 0
_________________________________________________________________
In [108]:
model4.fit(X_train_sd, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 2.0657 - mean_absolute_error: 1.2248
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0200 - mean_absolute_error: 1.2241
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0151 - mean_absolute_error: 1.2247
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9724 - mean_absolute_error: 1.2167
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9679 - mean_absolute_error: 1.2049
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1586 - mean_absolute_error: 1.2730
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0313 - mean_absolute_error: 1.2332
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9911 - mean_absolute_error: 1.2024
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9955 - mean_absolute_error: 1.2195
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9825 - mean_absolute_error: 1.2164
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1177 - mean_absolute_error: 1.2531
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0748 - mean_absolute_error: 1.2428
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1454 - mean_absolute_error: 1.2512
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9985 - mean_absolute_error: 1.2186
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9815 - mean_absolute_error: 1.2128
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0592 - mean_absolute_error: 1.2290
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0621 - mean_absolute_error: 1.2370
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0110 - mean_absolute_error: 1.2253
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0212 - mean_absolute_error: 1.2180
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9175 - mean_absolute_error: 1.1930
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0178 - mean_absolute_error: 1.2242
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9121 - mean_absolute_error: 1.1867
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0364 - mean_absolute_error: 1.2241
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0510 - mean_absolute_error: 1.2293
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9785 - mean_absolute_error: 1.2146
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9630 - mean_absolute_error: 1.1933
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9568 - mean_absolute_error: 1.1922
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9265 - mean_absolute_error: 1.1928
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9440 - mean_absolute_error: 1.1941
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0748 - mean_absolute_error: 1.2394
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0563 - mean_absolute_error: 1.2394
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0676 - mean_absolute_error: 1.2404
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0715 - mean_absolute_error: 1.2442
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9335 - mean_absolute_error: 1.1992
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9385 - mean_absolute_error: 1.1833
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9578 - mean_absolute_error: 1.1960
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0111 - mean_absolute_error: 1.2175
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9489 - mean_absolute_error: 1.1928
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9524 - mean_absolute_error: 1.1975
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0451 - mean_absolute_error: 1.2180
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0052 - mean_absolute_error: 1.2193
Epoch 42/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9866 - mean_absolute_error: 1.2119
Epoch 43/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0099 - mean_absolute_error: 1.2143
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9224 - mean_absolute_error: 1.1923
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9923 - mean_absolute_error: 1.2020
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9900 - mean_absolute_error: 1.2033
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9050 - mean_absolute_error: 1.1831
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0088 - mean_absolute_error: 1.2209
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0321 - mean_absolute_error: 1.2239
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1542 - mean_absolute_error: 1.2673
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8974 - mean_absolute_error: 1.1804
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9682 - mean_absolute_error: 1.2068
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8911 - mean_absolute_error: 1.1700
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9170 - mean_absolute_error: 1.1863
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9795 - mean_absolute_error: 1.2085
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0958 - mean_absolute_error: 1.2426
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9392 - mean_absolute_error: 1.1803
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9439 - mean_absolute_error: 1.2031
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0256 - mean_absolute_error: 1.2179
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0408 - mean_absolute_error: 1.2357
Epoch 61/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0119 - mean_absolute_error: 1.2122
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8505 - mean_absolute_error: 1.1625
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9888 - mean_absolute_error: 1.2080
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0440 - mean_absolute_error: 1.2361
Epoch 65/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9982 - mean_absolute_error: 1.2181
Epoch 66/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9617 - mean_absolute_error: 1.2002
Epoch 67/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0057 - mean_absolute_error: 1.2247
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0744 - mean_absolute_error: 1.2490
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9533 - mean_absolute_error: 1.1977
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9568 - mean_absolute_error: 1.1932
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0624 - mean_absolute_error: 1.2351
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0285 - mean_absolute_error: 1.2257
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1130 - mean_absolute_error: 1.2542
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0128 - mean_absolute_error: 1.2182
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9564 - mean_absolute_error: 1.1962
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0819 - mean_absolute_error: 1.2404
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1280 - mean_absolute_error: 1.2622
Epoch 78/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9497 - mean_absolute_error: 1.1902
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9800 - mean_absolute_error: 1.1927
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9613 - mean_absolute_error: 1.2035
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9740 - mean_absolute_error: 1.2036
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9986 - mean_absolute_error: 1.2056
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1176 - mean_absolute_error: 1.2561
Epoch 84/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0040 - mean_absolute_error: 1.2231
Epoch 85/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0450 - mean_absolute_error: 1.2164
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9810 - mean_absolute_error: 1.2111
Epoch 87/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0478 - mean_absolute_error: 1.2352
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9804 - mean_absolute_error: 1.2124
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9637 - mean_absolute_error: 1.2060
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9726 - mean_absolute_error: 1.2005
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9642 - mean_absolute_error: 1.2021
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0432 - mean_absolute_error: 1.2271
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0199 - mean_absolute_error: 1.2256
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9908 - mean_absolute_error: 1.2027
Epoch 95/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9663 - mean_absolute_error: 1.2079
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9518 - mean_absolute_error: 1.1907
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0105 - mean_absolute_error: 1.2164
Epoch 98/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0167 - mean_absolute_error: 1.2209
Epoch 99/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9595 - mean_absolute_error: 1.1942
Epoch 100/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9718 - mean_absolute_error: 1.2046
Out[108]:
<tensorflow.python.keras.callbacks.History at 0x1a55f76910>
In [109]:
model4.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 2.0075 - mean_absolute_error: 1.2159
Out[109]:
[2.0075135231018066, 1.2158657312393188]

2. Adding Four Hidden Layers to Model

In [110]:
# Initialize Sequential model
model5 = Sequential()

# Input Layer
model5.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Adding four Hidden layers
model5.add(Dense(15, activation='sigmoid', kernel_initializer = 'normal'))    # 2nd layer
model5.add(Dense(10, activation='sigmoid', kernel_initializer = 'normal'))    # 3rd layer

model5.add(Dense(10, activation='sigmoid', kernel_initializer = 'normal'))     # 4th layer
model5.add(Dense(20, activation='sigmoid', kernel_initializer = 'normal'))     # 5th layer


#Output layer
model5.add(Dense(1, activation='softmax', kernel_initializer = 'normal'))

rms2 = optimizers.RMSprop(lr = 0.01)
model5.compile(optimizer = rms2, loss = 'mean_squared_error', metrics = ['mean_absolute_error'])
In [111]:
model5.summary()
Model: "sequential_8"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_30 (Dense)             (None, 9)                 135       
_________________________________________________________________
dense_31 (Dense)             (None, 15)                150       
_________________________________________________________________
dense_32 (Dense)             (None, 10)                160       
_________________________________________________________________
dense_33 (Dense)             (None, 10)                110       
_________________________________________________________________
dense_34 (Dense)             (None, 20)                220       
_________________________________________________________________
dense_35 (Dense)             (None, 1)                 21        
=================================================================
Total params: 796
Trainable params: 796
Non-trainable params: 0
_________________________________________________________________
In [112]:
model5.fit(X_train_sd, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 2.0798 - mean_absolute_error: 1.2457
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0131 - mean_absolute_error: 1.2179
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0148 - mean_absolute_error: 1.2179
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0357 - mean_absolute_error: 1.2266
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0939 - mean_absolute_error: 1.2352
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9770 - mean_absolute_error: 1.2094
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0104 - mean_absolute_error: 1.2155
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0142 - mean_absolute_error: 1.2209
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9310 - mean_absolute_error: 1.1882
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0301 - mean_absolute_error: 1.2311
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0298 - mean_absolute_error: 1.2159
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9223 - mean_absolute_error: 1.1804
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9647 - mean_absolute_error: 1.2042
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1041 - mean_absolute_error: 1.2547
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9515 - mean_absolute_error: 1.1978
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1283 - mean_absolute_error: 1.2543
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9571 - mean_absolute_error: 1.2012
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9399 - mean_absolute_error: 1.2021
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0514 - mean_absolute_error: 1.2290
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9845 - mean_absolute_error: 1.2102
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9965 - mean_absolute_error: 1.2060
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9689 - mean_absolute_error: 1.2085
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0046 - mean_absolute_error: 1.2103
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8789 - mean_absolute_error: 1.1718
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0962 - mean_absolute_error: 1.2444
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0183 - mean_absolute_error: 1.2188
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0312 - mean_absolute_error: 1.2217
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9961 - mean_absolute_error: 1.2091
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8707 - mean_absolute_error: 1.1746
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9754 - mean_absolute_error: 1.2108
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0657 - mean_absolute_error: 1.2383
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0726 - mean_absolute_error: 1.2366
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1047 - mean_absolute_error: 1.2508
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0453 - mean_absolute_error: 1.2317
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0581 - mean_absolute_error: 1.2293
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9815 - mean_absolute_error: 1.2141
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0383 - mean_absolute_error: 1.2203
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0495 - mean_absolute_error: 1.2256
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0494 - mean_absolute_error: 1.2227
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9252 - mean_absolute_error: 1.1930
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0090 - mean_absolute_error: 1.2188
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0111 - mean_absolute_error: 1.2270
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9810 - mean_absolute_error: 1.2025
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9694 - mean_absolute_error: 1.2066
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0401 - mean_absolute_error: 1.2278
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9968 - mean_absolute_error: 1.2153
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0295 - mean_absolute_error: 1.2318
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9648 - mean_absolute_error: 1.2030
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9652 - mean_absolute_error: 1.2061
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9973 - mean_absolute_error: 1.2141
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0837 - mean_absolute_error: 1.2488
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0356 - mean_absolute_error: 1.2237
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1238 - mean_absolute_error: 1.2518
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9906 - mean_absolute_error: 1.2041
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0056 - mean_absolute_error: 1.2198
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0448 - mean_absolute_error: 1.2280
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9929 - mean_absolute_error: 1.2104
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0011 - mean_absolute_error: 1.2142
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0019 - mean_absolute_error: 1.2173
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0274 - mean_absolute_error: 1.2264
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9901 - mean_absolute_error: 1.2102
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9876 - mean_absolute_error: 1.2058
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9619 - mean_absolute_error: 1.1976
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0132 - mean_absolute_error: 1.2214
Epoch 65/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0701 - mean_absolute_error: 1.2336
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9638 - mean_absolute_error: 1.2000
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1372 - mean_absolute_error: 1.2585
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9775 - mean_absolute_error: 1.2032
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0492 - mean_absolute_error: 1.2199
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0257 - mean_absolute_error: 1.2319
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9459 - mean_absolute_error: 1.1845
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0218 - mean_absolute_error: 1.2206
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1151 - mean_absolute_error: 1.2564
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0151 - mean_absolute_error: 1.2080
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0656 - mean_absolute_error: 1.2373
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9994 - mean_absolute_error: 1.2159
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0402 - mean_absolute_error: 1.2264
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0883 - mean_absolute_error: 1.2393
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0239 - mean_absolute_error: 1.2281
Epoch 80/100
34/34 [==============================] - 0s 3ms/step - loss: 2.0667 - mean_absolute_error: 1.2321A: 0s - loss: 2.1025 - mean_absolute_error: 1.241
Epoch 81/100
34/34 [==============================] - 0s 3ms/step - loss: 1.9134 - mean_absolute_error: 1.1772
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0750 - mean_absolute_error: 1.2391
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9978 - mean_absolute_error: 1.2187
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9250 - mean_absolute_error: 1.1912
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9445 - mean_absolute_error: 1.2011
Epoch 86/100
34/34 [==============================] - 0s 3ms/step - loss: 2.0169 - mean_absolute_error: 1.2224
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0611 - mean_absolute_error: 1.2392
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0170 - mean_absolute_error: 1.2167
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9402 - mean_absolute_error: 1.1938
Epoch 90/100
34/34 [==============================] - 0s 3ms/step - loss: 1.9510 - mean_absolute_error: 1.1920
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9304 - mean_absolute_error: 1.1876
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9305 - mean_absolute_error: 1.2016
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9273 - mean_absolute_error: 1.1917
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1042 - mean_absolute_error: 1.2523
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0108 - mean_absolute_error: 1.2211
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0272 - mean_absolute_error: 1.2319
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9869 - mean_absolute_error: 1.2116
Epoch 98/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0354 - mean_absolute_error: 1.2226
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9555 - mean_absolute_error: 1.1965
Epoch 100/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9599 - mean_absolute_error: 1.1898
Out[112]:
<tensorflow.python.keras.callbacks.History at 0x1a56172e10>
In [113]:
model5.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 2.0075 - mean_absolute_error: 1.2159
Out[113]:
[2.0075135231018066, 1.2158657312393188]

II ] Neural Network Models After PCA

A. ------------------SGD Optimizer------------------

In [114]:
# SGD Neural Network regression model after pca

reg_model3 = Sequential()

# Input Layer

reg_model3.add(Dense (1, input_dim = 2, kernel_initializer = 'normal', activation = 'relu'))

# Output Layer

reg_model3.add(Dense(1, kernel_initializer = 'normal', activation = 'softmax'))

sgd3 = optimizers.SGD(lr = 0.01)
reg_model3.compile(optimizer = sgd3, loss = 'mean_squared_error', metrics = ['mean_absolute_error'])
In [115]:
reg_model3.summary()
Model: "sequential_9"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_36 (Dense)             (None, 1)                 3         
_________________________________________________________________
dense_37 (Dense)             (None, 1)                 2         
=================================================================
Total params: 5
Trainable params: 5
Non-trainable params: 0
_________________________________________________________________
In [116]:
history3 = reg_model3.fit(X_sd_pca, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9490 - mean_absolute_error: 1.2008
Epoch 2/100
34/34 [==============================] - 0s 3ms/step - loss: 1.9240 - mean_absolute_error: 1.1881
Epoch 3/100
34/34 [==============================] - 0s 3ms/step - loss: 1.8887 - mean_absolute_error: 1.1725
Epoch 4/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9237 - mean_absolute_error: 1.1864
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9181 - mean_absolute_error: 1.1757
Epoch 6/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9890 - mean_absolute_error: 1.2099
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9936 - mean_absolute_error: 1.2135
Epoch 8/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0112 - mean_absolute_error: 1.2074
Epoch 9/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9833 - mean_absolute_error: 1.1956
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0616 - mean_absolute_error: 1.2350
Epoch 11/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0423 - mean_absolute_error: 1.2268
Epoch 12/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0017 - mean_absolute_error: 1.2070
Epoch 13/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9685 - mean_absolute_error: 1.2004
Epoch 14/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0252 - mean_absolute_error: 1.2246
Epoch 15/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0038 - mean_absolute_error: 1.2130
Epoch 16/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9684 - mean_absolute_error: 1.2053
Epoch 17/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0064 - mean_absolute_error: 1.2202
Epoch 18/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9777 - mean_absolute_error: 1.2074
Epoch 19/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9859 - mean_absolute_error: 1.2054
Epoch 20/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0679 - mean_absolute_error: 1.2325
Epoch 21/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0392 - mean_absolute_error: 1.2261
Epoch 22/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0179 - mean_absolute_error: 1.2143
Epoch 23/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9003 - mean_absolute_error: 1.1763
Epoch 24/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0695 - mean_absolute_error: 1.2378
Epoch 25/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9656 - mean_absolute_error: 1.1988
Epoch 26/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0471 - mean_absolute_error: 1.2292
Epoch 27/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0185 - mean_absolute_error: 1.2073
Epoch 28/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1923 - mean_absolute_error: 1.2691
Epoch 29/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0488 - mean_absolute_error: 1.2346
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0612 - mean_absolute_error: 1.2380
Epoch 31/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9855 - mean_absolute_error: 1.2150
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9832 - mean_absolute_error: 1.2141
Epoch 33/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0561 - mean_absolute_error: 1.2368
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0634 - mean_absolute_error: 1.2274
Epoch 35/100
34/34 [==============================] - 0s 3ms/step - loss: 2.1432 - mean_absolute_error: 1.2675
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9741 - mean_absolute_error: 1.2094
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9262 - mean_absolute_error: 1.1925
Epoch 38/100
34/34 [==============================] - 0s 3ms/step - loss: 1.9239 - mean_absolute_error: 1.1878
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0360 - mean_absolute_error: 1.2138
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0200 - mean_absolute_error: 1.2347
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0258 - mean_absolute_error: 1.2136
Epoch 42/100
34/34 [==============================] - 0s 3ms/step - loss: 2.0068 - mean_absolute_error: 1.2205
Epoch 43/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9222 - mean_absolute_error: 1.1900
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9833 - mean_absolute_error: 1.2136
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1770 - mean_absolute_error: 1.2705
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0301 - mean_absolute_error: 1.2254
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1303 - mean_absolute_error: 1.2588
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9299 - mean_absolute_error: 1.1819
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9961 - mean_absolute_error: 1.2095
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8866 - mean_absolute_error: 1.1778
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0667 - mean_absolute_error: 1.2257
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0370 - mean_absolute_error: 1.2304
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8980 - mean_absolute_error: 1.1719
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0496 - mean_absolute_error: 1.2391
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0346 - mean_absolute_error: 1.2215
Epoch 56/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0539 - mean_absolute_error: 1.2332
Epoch 57/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0020 - mean_absolute_error: 1.2153
Epoch 58/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9317 - mean_absolute_error: 1.1778
Epoch 59/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9898 - mean_absolute_error: 1.2146
Epoch 60/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9976 - mean_absolute_error: 1.2150
Epoch 61/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0044 - mean_absolute_error: 1.2206
Epoch 62/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9335 - mean_absolute_error: 1.1914
Epoch 63/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0980 - mean_absolute_error: 1.2425
Epoch 64/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0533 - mean_absolute_error: 1.2399
Epoch 65/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9657 - mean_absolute_error: 1.1979
Epoch 66/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0181 - mean_absolute_error: 1.2105
Epoch 67/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9529 - mean_absolute_error: 1.1993
Epoch 68/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9514 - mean_absolute_error: 1.1897
Epoch 69/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9704 - mean_absolute_error: 1.1963
Epoch 70/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0362 - mean_absolute_error: 1.2217
Epoch 71/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9735 - mean_absolute_error: 1.2068
Epoch 72/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0543 - mean_absolute_error: 1.2423
Epoch 73/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1318 - mean_absolute_error: 1.2595
Epoch 74/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9746 - mean_absolute_error: 1.2071
Epoch 75/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0691 - mean_absolute_error: 1.2330
Epoch 76/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0218 - mean_absolute_error: 1.2165
Epoch 77/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9846 - mean_absolute_error: 1.2117
Epoch 78/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0220 - mean_absolute_error: 1.2248
Epoch 79/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0271 - mean_absolute_error: 1.2233
Epoch 80/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9651 - mean_absolute_error: 1.2009
Epoch 81/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9708 - mean_absolute_error: 1.2071
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8807 - mean_absolute_error: 1.1642
Epoch 83/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0113 - mean_absolute_error: 1.2200
Epoch 84/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0354 - mean_absolute_error: 1.2286
Epoch 85/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9187 - mean_absolute_error: 1.1847
Epoch 86/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9924 - mean_absolute_error: 1.2157
Epoch 87/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9848 - mean_absolute_error: 1.2111
Epoch 88/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9872 - mean_absolute_error: 1.2047
Epoch 89/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9308 - mean_absolute_error: 1.1867
Epoch 90/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9801 - mean_absolute_error: 1.2121
Epoch 91/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9919 - mean_absolute_error: 1.2047
Epoch 92/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0685 - mean_absolute_error: 1.2431
Epoch 93/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0272 - mean_absolute_error: 1.2257
Epoch 94/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9209 - mean_absolute_error: 1.1861
Epoch 95/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9997 - mean_absolute_error: 1.2085
Epoch 96/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0550 - mean_absolute_error: 1.2291
Epoch 97/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9425 - mean_absolute_error: 1.1960
Epoch 98/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1220 - mean_absolute_error: 1.2604
Epoch 99/100
34/34 [==============================] - 0s 1ms/step - loss: 1.8243 - mean_absolute_error: 1.1473
Epoch 100/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9720 - mean_absolute_error: 1.1917
In [117]:
reg_model3.evaluate(X_sd_pca, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 2.0075 - mean_absolute_error: 1.2159
Out[117]:
[2.0075135231018066, 1.2158657312393188]

1. Adding Two Hidden Layers to Model

In [118]:
# Initialize Sequential model
model6 = Sequential()

# Input Layer
model6.add(Dense (1, input_dim = 2, kernel_initializer = 'normal', activation = 'relu'))

# Adding two Hidden layers
model6.add(Dense(6, activation='tanh', kernel_initializer = 'normal'))    # 2nd layer
model6.add(Dense(5, activation='tanh', kernel_initializer = 'normal'))    # 3rd layer

#Output layer
model6.add(Dense(1, activation='softmax', kernel_initializer = 'normal'))

sgd4 = optimizers.SGD(lr = 0.01)
model6.compile(optimizer = sgd4, loss = 'mean_squared_error', metrics = ['mean_absolute_error'])
In [119]:
model6.summary()
Model: "sequential_10"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_38 (Dense)             (None, 1)                 3         
_________________________________________________________________
dense_39 (Dense)             (None, 6)                 12        
_________________________________________________________________
dense_40 (Dense)             (None, 5)                 35        
_________________________________________________________________
dense_41 (Dense)             (None, 1)                 6         
=================================================================
Total params: 56
Trainable params: 56
Non-trainable params: 0
_________________________________________________________________
In [120]:
model6.fit(X_sd_pca, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1452 - mean_absolute_error: 1.2588
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0090 - mean_absolute_error: 1.2192
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9970 - mean_absolute_error: 1.2059
Epoch 4/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0017 - mean_absolute_error: 1.2219
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9874 - mean_absolute_error: 1.2116
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9806 - mean_absolute_error: 1.2067
Epoch 7/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9929 - mean_absolute_error: 1.2074
Epoch 8/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9547 - mean_absolute_error: 1.1983
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8551 - mean_absolute_error: 1.1561
Epoch 10/100
34/34 [==============================] - 0s 1ms/step - loss: 1.8933 - mean_absolute_error: 1.1745
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8888 - mean_absolute_error: 1.1666
Epoch 12/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9730 - mean_absolute_error: 1.2121
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9981 - mean_absolute_error: 1.2108
Epoch 14/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0603 - mean_absolute_error: 1.2279
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0166 - mean_absolute_error: 1.2181
Epoch 16/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0279 - mean_absolute_error: 1.2245
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0583 - mean_absolute_error: 1.2369
Epoch 18/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9369 - mean_absolute_error: 1.1856
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9736 - mean_absolute_error: 1.2077
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9180 - mean_absolute_error: 1.1888
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9371 - mean_absolute_error: 1.1863
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0234 - mean_absolute_error: 1.2081
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9876 - mean_absolute_error: 1.2041
Epoch 24/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0540 - mean_absolute_error: 1.2242
Epoch 25/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9602 - mean_absolute_error: 1.2023
Epoch 26/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0076 - mean_absolute_error: 1.2136
Epoch 27/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9183 - mean_absolute_error: 1.1900
Epoch 28/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9944 - mean_absolute_error: 1.2072
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1595 - mean_absolute_error: 1.2590
Epoch 30/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9863 - mean_absolute_error: 1.2029
Epoch 31/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0038 - mean_absolute_error: 1.2026
Epoch 32/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0506 - mean_absolute_error: 1.2382
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9892 - mean_absolute_error: 1.2139
Epoch 34/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9266 - mean_absolute_error: 1.1958
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0009 - mean_absolute_error: 1.2203
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0190 - mean_absolute_error: 1.2167
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9732 - mean_absolute_error: 1.1980
Epoch 38/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1265 - mean_absolute_error: 1.2485
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0889 - mean_absolute_error: 1.2344
Epoch 40/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9193 - mean_absolute_error: 1.1856
Epoch 41/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9338 - mean_absolute_error: 1.1862
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1034 - mean_absolute_error: 1.2509
Epoch 43/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0942 - mean_absolute_error: 1.2501
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0130 - mean_absolute_error: 1.2167
Epoch 45/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9989 - mean_absolute_error: 1.2140
Epoch 46/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0122 - mean_absolute_error: 1.2081
Epoch 47/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0798 - mean_absolute_error: 1.2366
Epoch 48/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9926 - mean_absolute_error: 1.2120
Epoch 49/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9749 - mean_absolute_error: 1.2014
Epoch 50/100
34/34 [==============================] - 0s 1ms/step - loss: 1.8852 - mean_absolute_error: 1.1733
Epoch 51/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0190 - mean_absolute_error: 1.2201
Epoch 52/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9853 - mean_absolute_error: 1.2169
Epoch 53/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9394 - mean_absolute_error: 1.1952
Epoch 54/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9418 - mean_absolute_error: 1.1929
Epoch 55/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0093 - mean_absolute_error: 1.2109
Epoch 56/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0848 - mean_absolute_error: 1.2479
Epoch 57/100
34/34 [==============================] - 0s 1ms/step - loss: 1.8585 - mean_absolute_error: 1.1625
Epoch 58/100
34/34 [==============================] - ETA: 0s - loss: 1.4914 - mean_absolute_error: 1.042 - 0s 1ms/step - loss: 1.9922 - mean_absolute_error: 1.2162
Epoch 59/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0357 - mean_absolute_error: 1.2098
Epoch 60/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0321 - mean_absolute_error: 1.2279
Epoch 61/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9959 - mean_absolute_error: 1.2104
Epoch 62/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0263 - mean_absolute_error: 1.2296
Epoch 63/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9767 - mean_absolute_error: 1.2056
Epoch 64/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9685 - mean_absolute_error: 1.1949
Epoch 65/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0704 - mean_absolute_error: 1.2339
Epoch 66/100
34/34 [==============================] - 0s 1ms/step - loss: 1.8585 - mean_absolute_error: 1.1774
Epoch 67/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9851 - mean_absolute_error: 1.2069
Epoch 68/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9922 - mean_absolute_error: 1.2067
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0909 - mean_absolute_error: 1.2402
Epoch 70/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9672 - mean_absolute_error: 1.2079
Epoch 71/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9438 - mean_absolute_error: 1.1942
Epoch 72/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0482 - mean_absolute_error: 1.2352
Epoch 73/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0859 - mean_absolute_error: 1.2416
Epoch 74/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9921 - mean_absolute_error: 1.2121
Epoch 75/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9052 - mean_absolute_error: 1.1785
Epoch 76/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1327 - mean_absolute_error: 1.2582
Epoch 77/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9911 - mean_absolute_error: 1.2067
Epoch 78/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0730 - mean_absolute_error: 1.2333
Epoch 79/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9413 - mean_absolute_error: 1.2027
Epoch 80/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9685 - mean_absolute_error: 1.2051
Epoch 81/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9506 - mean_absolute_error: 1.1908
Epoch 82/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0621 - mean_absolute_error: 1.2274
Epoch 83/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1015 - mean_absolute_error: 1.2429
Epoch 84/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9651 - mean_absolute_error: 1.2027
Epoch 85/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9650 - mean_absolute_error: 1.2137
Epoch 86/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9364 - mean_absolute_error: 1.1892
Epoch 87/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9356 - mean_absolute_error: 1.1978
Epoch 88/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0800 - mean_absolute_error: 1.2357
Epoch 89/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0289 - mean_absolute_error: 1.2317
Epoch 90/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0523 - mean_absolute_error: 1.2226
Epoch 91/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0395 - mean_absolute_error: 1.2248
Epoch 92/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9475 - mean_absolute_error: 1.1956
Epoch 93/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9130 - mean_absolute_error: 1.1913
Epoch 94/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9867 - mean_absolute_error: 1.2114
Epoch 95/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9976 - mean_absolute_error: 1.2224
Epoch 96/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9559 - mean_absolute_error: 1.1921
Epoch 97/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0395 - mean_absolute_error: 1.2250
Epoch 98/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0936 - mean_absolute_error: 1.2461
Epoch 99/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9267 - mean_absolute_error: 1.1917
Epoch 100/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9809 - mean_absolute_error: 1.2094
Out[120]:
<tensorflow.python.keras.callbacks.History at 0x1a5651e6d0>
In [121]:
model6.evaluate(X_sd_pca, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 2.0075 - mean_absolute_error: 1.2159
Out[121]:
[2.0075135231018066, 1.2158657312393188]

2. Adding Four Hidden Layers to Model

In [122]:
# Initialize Sequential model
model7 = Sequential()

# Input Layer
model7.add(Dense (1, input_dim = 2, kernel_initializer = 'normal', activation = 'relu'))

# Adding four Hidden layers
model7.add(Dense(10, activation='sigmoid', kernel_initializer = 'normal'))    # 2nd layer
model7.add(Dense(12, activation='sigmoid', kernel_initializer = 'normal'))    # 3rd layer

model7.add(Dense(8, activation='sigmoid', kernel_initializer = 'normal'))     # 4th layer 
model7.add(Dense(6, activation='sigmoid', kernel_initializer = 'normal'))     # 5th layer


#Output layer
model7.add(Dense(1, activation='softmax', kernel_initializer = 'normal'))

sgd5 = optimizers.SGD(lr = 0.01)
model7.compile(optimizer = sgd5, loss = 'mean_squared_error', metrics = ['mean_absolute_error'])
In [123]:
model7.summary()
Model: "sequential_11"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_42 (Dense)             (None, 1)                 3         
_________________________________________________________________
dense_43 (Dense)             (None, 10)                20        
_________________________________________________________________
dense_44 (Dense)             (None, 12)                132       
_________________________________________________________________
dense_45 (Dense)             (None, 8)                 104       
_________________________________________________________________
dense_46 (Dense)             (None, 6)                 54        
_________________________________________________________________
dense_47 (Dense)             (None, 1)                 7         
=================================================================
Total params: 320
Trainable params: 320
Non-trainable params: 0
_________________________________________________________________
In [124]:
model7.fit(X_sd_pca, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9593 - mean_absolute_error: 1.1988
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9809 - mean_absolute_error: 1.2034
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9751 - mean_absolute_error: 1.1991
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0841 - mean_absolute_error: 1.2185
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0438 - mean_absolute_error: 1.2274
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9845 - mean_absolute_error: 1.2012
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1321 - mean_absolute_error: 1.2437
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9909 - mean_absolute_error: 1.2099
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0075 - mean_absolute_error: 1.2121
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0461 - mean_absolute_error: 1.2260
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0520 - mean_absolute_error: 1.2263
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0565 - mean_absolute_error: 1.2335
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0297 - mean_absolute_error: 1.2259
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0366 - mean_absolute_error: 1.2174
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9550 - mean_absolute_error: 1.1941
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9533 - mean_absolute_error: 1.1987
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9951 - mean_absolute_error: 1.2143
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9375 - mean_absolute_error: 1.1925
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9698 - mean_absolute_error: 1.2053
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0415 - mean_absolute_error: 1.2219
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9076 - mean_absolute_error: 1.1814
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9892 - mean_absolute_error: 1.2141
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0628 - mean_absolute_error: 1.2392
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0309 - mean_absolute_error: 1.2354
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9149 - mean_absolute_error: 1.1864
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0755 - mean_absolute_error: 1.2400
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0229 - mean_absolute_error: 1.2140
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0434 - mean_absolute_error: 1.2303
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0391 - mean_absolute_error: 1.2333
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9927 - mean_absolute_error: 1.2179
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0533 - mean_absolute_error: 1.2241
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0217 - mean_absolute_error: 1.2251
Epoch 33/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9281 - mean_absolute_error: 1.1795
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1084 - mean_absolute_error: 1.2446
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0169 - mean_absolute_error: 1.2151
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9780 - mean_absolute_error: 1.2067
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8991 - mean_absolute_error: 1.1734
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1019 - mean_absolute_error: 1.2494
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0851 - mean_absolute_error: 1.2401
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9215 - mean_absolute_error: 1.1864
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1522 - mean_absolute_error: 1.2557
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9456 - mean_absolute_error: 1.1959
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9135 - mean_absolute_error: 1.1894
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9663 - mean_absolute_error: 1.1998
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0398 - mean_absolute_error: 1.2344
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0028 - mean_absolute_error: 1.2145
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1197 - mean_absolute_error: 1.2544
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9601 - mean_absolute_error: 1.2036
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0986 - mean_absolute_error: 1.2560
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0065 - mean_absolute_error: 1.2164
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0359 - mean_absolute_error: 1.2272
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9295 - mean_absolute_error: 1.1966
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0087 - mean_absolute_error: 1.2176
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1103 - mean_absolute_error: 1.2564
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9468 - mean_absolute_error: 1.1909
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0485 - mean_absolute_error: 1.2341
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0302 - mean_absolute_error: 1.2272
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9027 - mean_absolute_error: 1.1788
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9868 - mean_absolute_error: 1.2044
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0145 - mean_absolute_error: 1.2216
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9529 - mean_absolute_error: 1.1939
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0229 - mean_absolute_error: 1.2268
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9960 - mean_absolute_error: 1.2072
Epoch 64/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0399 - mean_absolute_error: 1.2198
Epoch 65/100
34/34 [==============================] - 0s 1ms/step - loss: 1.8825 - mean_absolute_error: 1.1742
Epoch 66/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9905 - mean_absolute_error: 1.2152
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9771 - mean_absolute_error: 1.2083
Epoch 68/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0106 - mean_absolute_error: 1.2066
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9813 - mean_absolute_error: 1.2140
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9613 - mean_absolute_error: 1.2036
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9664 - mean_absolute_error: 1.2007
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0471 - mean_absolute_error: 1.2237
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0733 - mean_absolute_error: 1.2370
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8802 - mean_absolute_error: 1.1724
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9936 - mean_absolute_error: 1.2165
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0167 - mean_absolute_error: 1.2242
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9939 - mean_absolute_error: 1.2058
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0466 - mean_absolute_error: 1.2336
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9453 - mean_absolute_error: 1.2004
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9628 - mean_absolute_error: 1.2041
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9376 - mean_absolute_error: 1.1986
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9847 - mean_absolute_error: 1.2148
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0485 - mean_absolute_error: 1.2317
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9857 - mean_absolute_error: 1.2080
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9443 - mean_absolute_error: 1.1954
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0383 - mean_absolute_error: 1.2307
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9851 - mean_absolute_error: 1.2129
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0025 - mean_absolute_error: 1.2174
Epoch 89/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0991 - mean_absolute_error: 1.2600
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9955 - mean_absolute_error: 1.1956
Epoch 91/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9638 - mean_absolute_error: 1.1985
Epoch 92/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1258 - mean_absolute_error: 1.2477
Epoch 93/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0729 - mean_absolute_error: 1.2346
Epoch 94/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0062 - mean_absolute_error: 1.2229
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9180 - mean_absolute_error: 1.1843
Epoch 96/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0027 - mean_absolute_error: 1.2158
Epoch 97/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9584 - mean_absolute_error: 1.2005
Epoch 98/100
34/34 [==============================] - 0s 1ms/step - loss: 1.8840 - mean_absolute_error: 1.1746
Epoch 99/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9838 - mean_absolute_error: 1.2119
Epoch 100/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0102 - mean_absolute_error: 1.2145
Out[124]:
<tensorflow.python.keras.callbacks.History at 0x1a565e53d0>
In [125]:
model7.evaluate(X_sd_pca, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 2.0075 - mean_absolute_error: 1.2159
Out[125]:
[2.0075135231018066, 1.2158657312393188]

B. -----------------------Adam Optimizer-----------------------

In [126]:
# Adam Neural Network regression model after pca

reg_model4 = Sequential()

# Input Layer

reg_model4.add(Dense (1, input_dim = 2, kernel_initializer = 'normal', activation = 'relu'))

# Output Layer

reg_model4.add(Dense(1, kernel_initializer = 'normal', activation = 'softmax'))

adam3 = optimizers.Adam(lr = 0.01)
reg_model4.compile(optimizer = adam3, loss = 'mean_squared_error', metrics = ['mean_absolute_error'])
In [127]:
reg_model4.summary()
Model: "sequential_12"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_48 (Dense)             (None, 1)                 3         
_________________________________________________________________
dense_49 (Dense)             (None, 1)                 2         
=================================================================
Total params: 5
Trainable params: 5
Non-trainable params: 0
_________________________________________________________________
In [128]:
history4 = reg_model4.fit(X_sd_pca, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0034 - mean_absolute_error: 1.2251
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0228 - mean_absolute_error: 1.2170
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0077 - mean_absolute_error: 1.2058
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0010 - mean_absolute_error: 1.2188
Epoch 5/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0505 - mean_absolute_error: 1.2279
Epoch 6/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1174 - mean_absolute_error: 1.2489
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0547 - mean_absolute_error: 1.2362
Epoch 8/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0092 - mean_absolute_error: 1.2216
Epoch 9/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0074 - mean_absolute_error: 1.2166
Epoch 10/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9670 - mean_absolute_error: 1.1871
Epoch 11/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0288 - mean_absolute_error: 1.2221
Epoch 12/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9861 - mean_absolute_error: 1.2065
Epoch 13/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9784 - mean_absolute_error: 1.2011
Epoch 14/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1546 - mean_absolute_error: 1.2647
Epoch 15/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0649 - mean_absolute_error: 1.2307
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9883 - mean_absolute_error: 1.2039
Epoch 17/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9216 - mean_absolute_error: 1.1922
Epoch 18/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0144 - mean_absolute_error: 1.2157
Epoch 19/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0232 - mean_absolute_error: 1.2254
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0405 - mean_absolute_error: 1.2199
Epoch 21/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0524 - mean_absolute_error: 1.2268
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9252 - mean_absolute_error: 1.1858
Epoch 23/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9467 - mean_absolute_error: 1.1908
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9344 - mean_absolute_error: 1.1965
Epoch 25/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9760 - mean_absolute_error: 1.2096
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0420 - mean_absolute_error: 1.2295
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9863 - mean_absolute_error: 1.2100
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9971 - mean_absolute_error: 1.1995
Epoch 29/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0796 - mean_absolute_error: 1.2377
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0450 - mean_absolute_error: 1.2297
Epoch 31/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0004 - mean_absolute_error: 1.2055
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9606 - mean_absolute_error: 1.2007
Epoch 33/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0536 - mean_absolute_error: 1.2421
Epoch 34/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0222 - mean_absolute_error: 1.2224
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0414 - mean_absolute_error: 1.2263
Epoch 36/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0461 - mean_absolute_error: 1.2365
Epoch 37/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9833 - mean_absolute_error: 1.2134
Epoch 38/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0384 - mean_absolute_error: 1.2357
Epoch 39/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9383 - mean_absolute_error: 1.1884
Epoch 40/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9657 - mean_absolute_error: 1.1981
Epoch 41/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9714 - mean_absolute_error: 1.2101
Epoch 42/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0345 - mean_absolute_error: 1.2264
Epoch 43/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9990 - mean_absolute_error: 1.2107
Epoch 44/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0051 - mean_absolute_error: 1.2125
Epoch 45/100
34/34 [==============================] - 0s 1ms/step - loss: 1.8919 - mean_absolute_error: 1.1755
Epoch 46/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0133 - mean_absolute_error: 1.2222
Epoch 47/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1205 - mean_absolute_error: 1.2522
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9991 - mean_absolute_error: 1.2217
Epoch 49/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9821 - mean_absolute_error: 1.2117
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9161 - mean_absolute_error: 1.1834
Epoch 51/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0201 - mean_absolute_error: 1.2249
Epoch 52/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1143 - mean_absolute_error: 1.2446
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0421 - mean_absolute_error: 1.2208
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9436 - mean_absolute_error: 1.1958
Epoch 55/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0859 - mean_absolute_error: 1.2458
Epoch 56/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9534 - mean_absolute_error: 1.1924
Epoch 57/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0678 - mean_absolute_error: 1.2357
Epoch 58/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9521 - mean_absolute_error: 1.2043
Epoch 59/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9901 - mean_absolute_error: 1.2059
Epoch 60/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9946 - mean_absolute_error: 1.2135
Epoch 61/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0522 - mean_absolute_error: 1.2352
Epoch 62/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0260 - mean_absolute_error: 1.2205
Epoch 63/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1257 - mean_absolute_error: 1.2572
Epoch 64/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0236 - mean_absolute_error: 1.2175
Epoch 65/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9982 - mean_absolute_error: 1.2197
Epoch 66/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9791 - mean_absolute_error: 1.2063
Epoch 67/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0399 - mean_absolute_error: 1.2173
Epoch 68/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9516 - mean_absolute_error: 1.1939
Epoch 69/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9336 - mean_absolute_error: 1.1922
Epoch 70/100
34/34 [==============================] - 0s 1ms/step - loss: 1.8648 - mean_absolute_error: 1.1618
Epoch 71/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9517 - mean_absolute_error: 1.2022
Epoch 72/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0376 - mean_absolute_error: 1.2205
Epoch 73/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9969 - mean_absolute_error: 1.2072
Epoch 74/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9838 - mean_absolute_error: 1.2012
Epoch 75/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9350 - mean_absolute_error: 1.1924
Epoch 76/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0419 - mean_absolute_error: 1.2275
Epoch 77/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0111 - mean_absolute_error: 1.2160
Epoch 78/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9103 - mean_absolute_error: 1.1834
Epoch 79/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9569 - mean_absolute_error: 1.2023
Epoch 80/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0946 - mean_absolute_error: 1.2417
Epoch 81/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0132 - mean_absolute_error: 1.2104
Epoch 82/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0308 - mean_absolute_error: 1.2292
Epoch 83/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9188 - mean_absolute_error: 1.1840
Epoch 84/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9822 - mean_absolute_error: 1.2045
Epoch 85/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9264 - mean_absolute_error: 1.1925
Epoch 86/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0369 - mean_absolute_error: 1.2269
Epoch 87/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0751 - mean_absolute_error: 1.2266
Epoch 88/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0386 - mean_absolute_error: 1.2293
Epoch 89/100
34/34 [==============================] - 0s 1ms/step - loss: 1.8734 - mean_absolute_error: 1.1720
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9978 - mean_absolute_error: 1.2112
Epoch 91/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9549 - mean_absolute_error: 1.2014
Epoch 92/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0644 - mean_absolute_error: 1.2346
Epoch 93/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9266 - mean_absolute_error: 1.1850
Epoch 94/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9722 - mean_absolute_error: 1.2167
Epoch 95/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0600 - mean_absolute_error: 1.2336
Epoch 96/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0630 - mean_absolute_error: 1.2355
Epoch 97/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1321 - mean_absolute_error: 1.2625
Epoch 98/100
34/34 [==============================] - 0s 1ms/step - loss: 1.8193 - mean_absolute_error: 1.1564
Epoch 99/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9808 - mean_absolute_error: 1.2001
Epoch 100/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0731 - mean_absolute_error: 1.2378
In [129]:
reg_model4.evaluate(X_sd_pca, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 2.0075 - mean_absolute_error: 1.2159
Out[129]:
[2.0075135231018066, 1.2158657312393188]

1. Adding Two Hidden Layers to Model

In [130]:
# Initialize Sequential model
model8 = Sequential()

# Input Layer
model8.add(Dense (1, input_dim = 2, kernel_initializer = 'normal', activation = 'relu'))

# Adding two Hidden layers
model8.add(Dense(10, activation='elu', kernel_initializer = 'normal'))    # 2nd layer
model8.add(Dense(4, activation='elu', kernel_initializer = 'normal'))    # 3rd layer

#Output layer
model8.add(Dense(1, activation='softmax', kernel_initializer = 'normal'))

adam4 = optimizers.Adam(lr = 0.01)
model8.compile(optimizer = adam4, loss = 'mean_squared_error', metrics = ['mean_absolute_error'])
In [131]:
model8.summary()
Model: "sequential_13"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_50 (Dense)             (None, 1)                 3         
_________________________________________________________________
dense_51 (Dense)             (None, 10)                20        
_________________________________________________________________
dense_52 (Dense)             (None, 4)                 44        
_________________________________________________________________
dense_53 (Dense)             (None, 1)                 5         
=================================================================
Total params: 72
Trainable params: 72
Non-trainable params: 0
_________________________________________________________________
In [132]:
model8.fit(X_sd_pca, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 1.8734 - mean_absolute_error: 1.1673
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0792 - mean_absolute_error: 1.2425
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1191 - mean_absolute_error: 1.2518
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0025 - mean_absolute_error: 1.2091
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9969 - mean_absolute_error: 1.2125
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1416 - mean_absolute_error: 1.2652
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9026 - mean_absolute_error: 1.1812
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0004 - mean_absolute_error: 1.2116
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8958 - mean_absolute_error: 1.1803
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0681 - mean_absolute_error: 1.2410
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9717 - mean_absolute_error: 1.2023
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0715 - mean_absolute_error: 1.2400
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9927 - mean_absolute_error: 1.2102
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0835 - mean_absolute_error: 1.2415
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0503 - mean_absolute_error: 1.2322
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0452 - mean_absolute_error: 1.2300
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0208 - mean_absolute_error: 1.2120
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9982 - mean_absolute_error: 1.2227
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9991 - mean_absolute_error: 1.2184
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0695 - mean_absolute_error: 1.2361
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9751 - mean_absolute_error: 1.2090
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0187 - mean_absolute_error: 1.2257
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0541 - mean_absolute_error: 1.2298
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9807 - mean_absolute_error: 1.2081
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0795 - mean_absolute_error: 1.2437
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0103 - mean_absolute_error: 1.2161
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9282 - mean_absolute_error: 1.1926
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0758 - mean_absolute_error: 1.2277
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9961 - mean_absolute_error: 1.2128
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0718 - mean_absolute_error: 1.2385
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0439 - mean_absolute_error: 1.2331
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0833 - mean_absolute_error: 1.2496
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0022 - mean_absolute_error: 1.2109
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0360 - mean_absolute_error: 1.2303
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9975 - mean_absolute_error: 1.2091
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0824 - mean_absolute_error: 1.2340
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9148 - mean_absolute_error: 1.1819
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9569 - mean_absolute_error: 1.1997
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0684 - mean_absolute_error: 1.2341
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0485 - mean_absolute_error: 1.2266
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1197 - mean_absolute_error: 1.2452
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9790 - mean_absolute_error: 1.2035
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9506 - mean_absolute_error: 1.1936
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0353 - mean_absolute_error: 1.2327
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0143 - mean_absolute_error: 1.2176
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9599 - mean_absolute_error: 1.1973
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0401 - mean_absolute_error: 1.2219
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0243 - mean_absolute_error: 1.2262
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0291 - mean_absolute_error: 1.2174
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0275 - mean_absolute_error: 1.2223
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9980 - mean_absolute_error: 1.2252
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9355 - mean_absolute_error: 1.1926
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0566 - mean_absolute_error: 1.2272
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0058 - mean_absolute_error: 1.2178
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0108 - mean_absolute_error: 1.2203
Epoch 56/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9548 - mean_absolute_error: 1.1861
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0069 - mean_absolute_error: 1.2179
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9669 - mean_absolute_error: 1.2051
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9851 - mean_absolute_error: 1.2123
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1316 - mean_absolute_error: 1.2564
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9929 - mean_absolute_error: 1.2042
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9158 - mean_absolute_error: 1.1879
Epoch 63/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9727 - mean_absolute_error: 1.2113
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9419 - mean_absolute_error: 1.1955
Epoch 65/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0270 - mean_absolute_error: 1.2307
Epoch 66/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0077 - mean_absolute_error: 1.2236
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9577 - mean_absolute_error: 1.1971
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0530 - mean_absolute_error: 1.2363
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9378 - mean_absolute_error: 1.1808
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9798 - mean_absolute_error: 1.2069
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0541 - mean_absolute_error: 1.2276
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9282 - mean_absolute_error: 1.1896
Epoch 73/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9424 - mean_absolute_error: 1.1934
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9753 - mean_absolute_error: 1.2025
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0536 - mean_absolute_error: 1.2368
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8954 - mean_absolute_error: 1.1781
Epoch 77/100
34/34 [==============================] - 0s 3ms/step - loss: 2.0020 - mean_absolute_error: 1.2192
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0341 - mean_absolute_error: 1.2181
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0387 - mean_absolute_error: 1.2204
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0842 - mean_absolute_error: 1.2385
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9202 - mean_absolute_error: 1.1801
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9724 - mean_absolute_error: 1.2123
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9826 - mean_absolute_error: 1.2148
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0101 - mean_absolute_error: 1.2158
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9979 - mean_absolute_error: 1.2105
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9802 - mean_absolute_error: 1.2112
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0543 - mean_absolute_error: 1.2266
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0329 - mean_absolute_error: 1.2156
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9952 - mean_absolute_error: 1.2195
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9975 - mean_absolute_error: 1.2203
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0641 - mean_absolute_error: 1.2334
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9305 - mean_absolute_error: 1.1922
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9333 - mean_absolute_error: 1.1922
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9368 - mean_absolute_error: 1.1897
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0605 - mean_absolute_error: 1.2368
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0087 - mean_absolute_error: 1.2160
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0586 - mean_absolute_error: 1.2217
Epoch 98/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0466 - mean_absolute_error: 1.2245
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0858 - mean_absolute_error: 1.2359
Epoch 100/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0412 - mean_absolute_error: 1.2229
Out[132]:
<tensorflow.python.keras.callbacks.History at 0x1a568f8190>
In [133]:
model8.evaluate(X_sd_pca, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 2.0075 - mean_absolute_error: 1.2159
Out[133]:
[2.0075135231018066, 1.2158657312393188]

2. Adding Four Hidden Layers to Model

In [134]:
# Initialize Sequential model
model9 = Sequential()

# Input Layer
model9.add(Dense (1, input_dim = 2, kernel_initializer = 'normal', activation = 'relu'))

# Adding four Hidden layers
model9.add(Dense(10, activation='tanh', kernel_initializer = 'normal'))    # 2nd layer
model9.add(Dense(4, activation='tanh', kernel_initializer = 'normal'))     # 3rd layer

model9.add(Dense(5, activation='tanh', kernel_initializer = 'normal'))     # 4th layer
model9.add(Dense(12, activation='tanh', kernel_initializer = 'normal'))     # 5th layer


#Output layer
model9.add(Dense(1, activation='softmax', kernel_initializer = 'normal'))

adam5 = optimizers.Adam(lr = 0.01)
model9.compile(optimizer = adam5, loss = 'mean_squared_error', metrics = ['mean_absolute_error'])
In [135]:
model9.summary()
Model: "sequential_14"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_54 (Dense)             (None, 1)                 3         
_________________________________________________________________
dense_55 (Dense)             (None, 10)                20        
_________________________________________________________________
dense_56 (Dense)             (None, 4)                 44        
_________________________________________________________________
dense_57 (Dense)             (None, 5)                 25        
_________________________________________________________________
dense_58 (Dense)             (None, 12)                72        
_________________________________________________________________
dense_59 (Dense)             (None, 1)                 13        
=================================================================
Total params: 177
Trainable params: 177
Non-trainable params: 0
_________________________________________________________________
In [136]:
model9.fit(X_sd_pca, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 1.8948 - mean_absolute_error: 1.1666
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9771 - mean_absolute_error: 1.2087
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9442 - mean_absolute_error: 1.1996
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9422 - mean_absolute_error: 1.1904
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0350 - mean_absolute_error: 1.2301
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9328 - mean_absolute_error: 1.1873
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1636 - mean_absolute_error: 1.2704
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9648 - mean_absolute_error: 1.1956
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0470 - mean_absolute_error: 1.2167
Epoch 10/100
34/34 [==============================] - 0s 3ms/step - loss: 2.0540 - mean_absolute_error: 1.2358
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0332 - mean_absolute_error: 1.2294
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9902 - mean_absolute_error: 1.2031
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8996 - mean_absolute_error: 1.1745
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9852 - mean_absolute_error: 1.2066
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0527 - mean_absolute_error: 1.2299
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9939 - mean_absolute_error: 1.2099
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0877 - mean_absolute_error: 1.2440
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9689 - mean_absolute_error: 1.2032
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9289 - mean_absolute_error: 1.1933
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0510 - mean_absolute_error: 1.2328
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0177 - mean_absolute_error: 1.2272
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9944 - mean_absolute_error: 1.2016
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9227 - mean_absolute_error: 1.1744
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0021 - mean_absolute_error: 1.2133
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9981 - mean_absolute_error: 1.2155
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9682 - mean_absolute_error: 1.2031
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0819 - mean_absolute_error: 1.2298
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0998 - mean_absolute_error: 1.2437
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0000 - mean_absolute_error: 1.2179
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0483 - mean_absolute_error: 1.2197
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9681 - mean_absolute_error: 1.2030
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0391 - mean_absolute_error: 1.2192
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9877 - mean_absolute_error: 1.2110
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9238 - mean_absolute_error: 1.1849
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9869 - mean_absolute_error: 1.2015
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8990 - mean_absolute_error: 1.1816
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9686 - mean_absolute_error: 1.2022
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0314 - mean_absolute_error: 1.2168
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0353 - mean_absolute_error: 1.2212
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0317 - mean_absolute_error: 1.2234
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0132 - mean_absolute_error: 1.2071
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9266 - mean_absolute_error: 1.1973
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9701 - mean_absolute_error: 1.2048
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0102 - mean_absolute_error: 1.2086
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9449 - mean_absolute_error: 1.1947
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0506 - mean_absolute_error: 1.2180
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9488 - mean_absolute_error: 1.2013
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0194 - mean_absolute_error: 1.2194
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0228 - mean_absolute_error: 1.2190
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9827 - mean_absolute_error: 1.2049
Epoch 51/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1330 - mean_absolute_error: 1.2449
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0593 - mean_absolute_error: 1.2383
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0423 - mean_absolute_error: 1.2289
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9241 - mean_absolute_error: 1.1976
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9261 - mean_absolute_error: 1.1884
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9917 - mean_absolute_error: 1.2268
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0667 - mean_absolute_error: 1.2371
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0215 - mean_absolute_error: 1.2273
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0269 - mean_absolute_error: 1.2237
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0718 - mean_absolute_error: 1.2348
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9854 - mean_absolute_error: 1.2138
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0550 - mean_absolute_error: 1.2334
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9098 - mean_absolute_error: 1.1769
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0549 - mean_absolute_error: 1.2305
Epoch 65/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9573 - mean_absolute_error: 1.2030
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9624 - mean_absolute_error: 1.2101
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0289 - mean_absolute_error: 1.2250
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9569 - mean_absolute_error: 1.1932
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9296 - mean_absolute_error: 1.1870
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0765 - mean_absolute_error: 1.2360
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0524 - mean_absolute_error: 1.2352
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9187 - mean_absolute_error: 1.1857
Epoch 73/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0512 - mean_absolute_error: 1.2229
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9786 - mean_absolute_error: 1.2055
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9569 - mean_absolute_error: 1.2009
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9794 - mean_absolute_error: 1.2036
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0161 - mean_absolute_error: 1.2071
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0941 - mean_absolute_error: 1.2519
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0376 - mean_absolute_error: 1.2230
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9669 - mean_absolute_error: 1.1993
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0450 - mean_absolute_error: 1.2259
Epoch 82/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9167 - mean_absolute_error: 1.1863
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0434 - mean_absolute_error: 1.2273
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0919 - mean_absolute_error: 1.2468
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0190 - mean_absolute_error: 1.2269
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9123 - mean_absolute_error: 1.1830
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1201 - mean_absolute_error: 1.2457
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9329 - mean_absolute_error: 1.1911
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9236 - mean_absolute_error: 1.1855
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0122 - mean_absolute_error: 1.2192
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0600 - mean_absolute_error: 1.2301
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9696 - mean_absolute_error: 1.2038
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9730 - mean_absolute_error: 1.2071
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9684 - mean_absolute_error: 1.2113
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0849 - mean_absolute_error: 1.2438
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9951 - mean_absolute_error: 1.2095
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1491 - mean_absolute_error: 1.2683
Epoch 98/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9594 - mean_absolute_error: 1.1995
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0165 - mean_absolute_error: 1.2204
Epoch 100/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9877 - mean_absolute_error: 1.2143
Out[136]:
<tensorflow.python.keras.callbacks.History at 0x1a54d16250>
In [137]:
model9.evaluate(X_sd_pca, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 2.0075 - mean_absolute_error: 1.2159
Out[137]:
[2.0075135231018066, 1.2158657312393188]

C. ----------------RMSProp Optimizer------------------

In [138]:
# Rmsprop Neural Network regression model after pca

reg_model5 = Sequential()

# Input Layer

reg_model5.add(Dense (1, input_dim = 2, kernel_initializer = 'normal', activation = 'relu'))

# Output Layer

reg_model5.add(Dense(1, kernel_initializer = 'normal', activation = 'softmax'))

rms3 = optimizers.RMSprop(lr = 0.01)
reg_model5.compile(optimizer = rms3, loss = 'mean_squared_error', metrics = ['mean_absolute_error'])
In [139]:
reg_model5.summary()
Model: "sequential_15"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_60 (Dense)             (None, 1)                 3         
_________________________________________________________________
dense_61 (Dense)             (None, 1)                 2         
=================================================================
Total params: 5
Trainable params: 5
Non-trainable params: 0
_________________________________________________________________
In [140]:
reg_model5.fit(X_sd_pca, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 1ms/step - loss: 1.9615 - mean_absolute_error: 1.2015
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0426 - mean_absolute_error: 1.2298
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1251 - mean_absolute_error: 1.2471
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0663 - mean_absolute_error: 1.2230
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0166 - mean_absolute_error: 1.2228
Epoch 6/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9854 - mean_absolute_error: 1.2063
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9721 - mean_absolute_error: 1.2063
Epoch 8/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0558 - mean_absolute_error: 1.2299
Epoch 9/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9895 - mean_absolute_error: 1.2152
Epoch 10/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0289 - mean_absolute_error: 1.2328
Epoch 11/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0910 - mean_absolute_error: 1.2485
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9426 - mean_absolute_error: 1.1987
Epoch 13/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0245 - mean_absolute_error: 1.2193
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9995 - mean_absolute_error: 1.2128
Epoch 15/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0452 - mean_absolute_error: 1.2284
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9896 - mean_absolute_error: 1.2146
Epoch 17/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9218 - mean_absolute_error: 1.1925
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0526 - mean_absolute_error: 1.2231
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0351 - mean_absolute_error: 1.2168
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0600 - mean_absolute_error: 1.2193
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1175 - mean_absolute_error: 1.2478
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9731 - mean_absolute_error: 1.1931
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9263 - mean_absolute_error: 1.1820
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9928 - mean_absolute_error: 1.2071
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0596 - mean_absolute_error: 1.2224
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1417 - mean_absolute_error: 1.2696
Epoch 27/100
34/34 [==============================] - 0s 3ms/step - loss: 2.1318 - mean_absolute_error: 1.2537
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9936 - mean_absolute_error: 1.2077
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9448 - mean_absolute_error: 1.1874
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0264 - mean_absolute_error: 1.2183
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9727 - mean_absolute_error: 1.2017
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0562 - mean_absolute_error: 1.2239
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0751 - mean_absolute_error: 1.2339
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9561 - mean_absolute_error: 1.1981
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0504 - mean_absolute_error: 1.2270
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0331 - mean_absolute_error: 1.2264
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9604 - mean_absolute_error: 1.1924
Epoch 38/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0239 - mean_absolute_error: 1.2137
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9542 - mean_absolute_error: 1.1987
Epoch 40/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9740 - mean_absolute_error: 1.2133
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9881 - mean_absolute_error: 1.2140
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9050 - mean_absolute_error: 1.1847
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0234 - mean_absolute_error: 1.2228
Epoch 44/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0258 - mean_absolute_error: 1.2327
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0537 - mean_absolute_error: 1.2381
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1587 - mean_absolute_error: 1.2629
Epoch 47/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9177 - mean_absolute_error: 1.1827
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9309 - mean_absolute_error: 1.1870
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0190 - mean_absolute_error: 1.2156
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0935 - mean_absolute_error: 1.2438
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0732 - mean_absolute_error: 1.2434
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0520 - mean_absolute_error: 1.2276
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0584 - mean_absolute_error: 1.2415
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9960 - mean_absolute_error: 1.2043
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9149 - mean_absolute_error: 1.1854
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0085 - mean_absolute_error: 1.2136
Epoch 57/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0070 - mean_absolute_error: 1.2103
Epoch 58/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9341 - mean_absolute_error: 1.1840
Epoch 59/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0601 - mean_absolute_error: 1.2233
Epoch 60/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0569 - mean_absolute_error: 1.2268
Epoch 61/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0254 - mean_absolute_error: 1.2196
Epoch 62/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9462 - mean_absolute_error: 1.1910
Epoch 63/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9289 - mean_absolute_error: 1.1975
Epoch 64/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0655 - mean_absolute_error: 1.2398
Epoch 65/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0730 - mean_absolute_error: 1.2379
Epoch 66/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0212 - mean_absolute_error: 1.2194
Epoch 67/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9913 - mean_absolute_error: 1.1969
Epoch 68/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9996 - mean_absolute_error: 1.2215
Epoch 69/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0137 - mean_absolute_error: 1.2202
Epoch 70/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9526 - mean_absolute_error: 1.1978
Epoch 71/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9655 - mean_absolute_error: 1.2042
Epoch 72/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9993 - mean_absolute_error: 1.2127
Epoch 73/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0639 - mean_absolute_error: 1.2311
Epoch 74/100
34/34 [==============================] - 0s 1ms/step - loss: 1.8946 - mean_absolute_error: 1.1798
Epoch 75/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0422 - mean_absolute_error: 1.2243
Epoch 76/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1129 - mean_absolute_error: 1.2544
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9953 - mean_absolute_error: 1.2169
Epoch 78/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0852 - mean_absolute_error: 1.2353
Epoch 79/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0075 - mean_absolute_error: 1.2269
Epoch 80/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0156 - mean_absolute_error: 1.2250
Epoch 81/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9925 - mean_absolute_error: 1.2120
Epoch 82/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1217 - mean_absolute_error: 1.2507
Epoch 83/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9452 - mean_absolute_error: 1.1924
Epoch 84/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0876 - mean_absolute_error: 1.2427
Epoch 85/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0082 - mean_absolute_error: 1.2147
Epoch 86/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0649 - mean_absolute_error: 1.2373
Epoch 87/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0537 - mean_absolute_error: 1.2269
Epoch 88/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9522 - mean_absolute_error: 1.2027
Epoch 89/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0094 - mean_absolute_error: 1.2122
Epoch 90/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0874 - mean_absolute_error: 1.2487
Epoch 91/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9333 - mean_absolute_error: 1.1868
Epoch 92/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9238 - mean_absolute_error: 1.1894
Epoch 93/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0671 - mean_absolute_error: 1.2371
Epoch 94/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0013 - mean_absolute_error: 1.2158
Epoch 95/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0000 - mean_absolute_error: 1.2117
Epoch 96/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0378 - mean_absolute_error: 1.2206
Epoch 97/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0084 - mean_absolute_error: 1.2140
Epoch 98/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9866 - mean_absolute_error: 1.2045
Epoch 99/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0468 - mean_absolute_error: 1.2276
Epoch 100/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9907 - mean_absolute_error: 1.2117
Out[140]:
<tensorflow.python.keras.callbacks.History at 0x1a520bc3d0>
In [141]:
reg_model5.evaluate(X_sd_pca, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 2.0075 - mean_absolute_error: 1.2159
Out[141]:
[2.0075135231018066, 1.2158657312393188]

1. Adding Two Hidden Layers to Model

In [142]:
# Initialize Sequential model
model10 = Sequential()

# Input Layer
model10.add(Dense (1, input_dim = 2, kernel_initializer = 'normal', activation = 'relu'))

# Adding two Hidden layers
model10.add(Dense(15, activation='sigmoid', kernel_initializer = 'normal'))    # 2nd layer
model10.add(Dense(10, activation='sigmoid', kernel_initializer = 'normal'))    # 3rd layer

#Output layer
model10.add(Dense(1, activation='softmax', kernel_initializer = 'normal'))

rms4 = optimizers.RMSprop(lr = 0.01)
model10.compile(optimizer = rms4, loss = 'mean_squared_error', metrics = ['mean_absolute_error'])
In [143]:
model10.summary()
Model: "sequential_16"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_62 (Dense)             (None, 1)                 3         
_________________________________________________________________
dense_63 (Dense)             (None, 15)                30        
_________________________________________________________________
dense_64 (Dense)             (None, 10)                160       
_________________________________________________________________
dense_65 (Dense)             (None, 1)                 11        
=================================================================
Total params: 204
Trainable params: 204
Non-trainable params: 0
_________________________________________________________________
In [144]:
model10.fit(X_sd_pca, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 2.0075 - mean_absolute_error: 1.2257
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9764 - mean_absolute_error: 1.2059
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0424 - mean_absolute_error: 1.2295
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0271 - mean_absolute_error: 1.2178
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9473 - mean_absolute_error: 1.1921
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8920 - mean_absolute_error: 1.1763
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0020 - mean_absolute_error: 1.2135
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1021 - mean_absolute_error: 1.2431
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9209 - mean_absolute_error: 1.1869
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9915 - mean_absolute_error: 1.2190
Epoch 11/100
34/34 [==============================] - 0s 3ms/step - loss: 1.9746 - mean_absolute_error: 1.1993
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0955 - mean_absolute_error: 1.2505
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0977 - mean_absolute_error: 1.2555
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9805 - mean_absolute_error: 1.2051
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9844 - mean_absolute_error: 1.2163
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1406 - mean_absolute_error: 1.2518
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0323 - mean_absolute_error: 1.2155
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9042 - mean_absolute_error: 1.1892
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9142 - mean_absolute_error: 1.1914
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0366 - mean_absolute_error: 1.2276
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9765 - mean_absolute_error: 1.2087
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9586 - mean_absolute_error: 1.2052
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0347 - mean_absolute_error: 1.2232
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9554 - mean_absolute_error: 1.1939
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0396 - mean_absolute_error: 1.2287
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9613 - mean_absolute_error: 1.1937
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0878 - mean_absolute_error: 1.2455
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0423 - mean_absolute_error: 1.2276
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0166 - mean_absolute_error: 1.2106
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9142 - mean_absolute_error: 1.1830
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0368 - mean_absolute_error: 1.2181
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0491 - mean_absolute_error: 1.2282
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0435 - mean_absolute_error: 1.2290
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0151 - mean_absolute_error: 1.2265
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9741 - mean_absolute_error: 1.2100
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0347 - mean_absolute_error: 1.2200
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0194 - mean_absolute_error: 1.2284
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0060 - mean_absolute_error: 1.2180
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9932 - mean_absolute_error: 1.2195
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1158 - mean_absolute_error: 1.2527
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0198 - mean_absolute_error: 1.2155
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0558 - mean_absolute_error: 1.2368
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9748 - mean_absolute_error: 1.2063
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0266 - mean_absolute_error: 1.2194
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0889 - mean_absolute_error: 1.2398
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9116 - mean_absolute_error: 1.1889
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8832 - mean_absolute_error: 1.1755
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1014 - mean_absolute_error: 1.2553
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9986 - mean_absolute_error: 1.2069
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0834 - mean_absolute_error: 1.2424
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9531 - mean_absolute_error: 1.1905
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0503 - mean_absolute_error: 1.2281
Epoch 53/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0550 - mean_absolute_error: 1.2338
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9883 - mean_absolute_error: 1.2011
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9137 - mean_absolute_error: 1.1839
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0200 - mean_absolute_error: 1.2255
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0571 - mean_absolute_error: 1.2257
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0113 - mean_absolute_error: 1.2165
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0834 - mean_absolute_error: 1.2323
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9951 - mean_absolute_error: 1.2166
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0682 - mean_absolute_error: 1.2341
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0046 - mean_absolute_error: 1.2208
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9941 - mean_absolute_error: 1.2158
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9837 - mean_absolute_error: 1.2134
Epoch 65/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1204 - mean_absolute_error: 1.2520
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1717 - mean_absolute_error: 1.2692
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0020 - mean_absolute_error: 1.2064
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0371 - mean_absolute_error: 1.2286
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9788 - mean_absolute_error: 1.2052
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0339 - mean_absolute_error: 1.2193
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9689 - mean_absolute_error: 1.1799
Epoch 72/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0034 - mean_absolute_error: 1.2157
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0407 - mean_absolute_error: 1.2356
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9359 - mean_absolute_error: 1.1915
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9723 - mean_absolute_error: 1.2014
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0257 - mean_absolute_error: 1.2226
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9451 - mean_absolute_error: 1.1894
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9972 - mean_absolute_error: 1.2147
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0286 - mean_absolute_error: 1.2243
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0187 - mean_absolute_error: 1.2229
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0357 - mean_absolute_error: 1.2295
Epoch 82/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0025 - mean_absolute_error: 1.2145
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0509 - mean_absolute_error: 1.2357
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0203 - mean_absolute_error: 1.2178
Epoch 85/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0699 - mean_absolute_error: 1.2471
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0384 - mean_absolute_error: 1.2315
Epoch 87/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9773 - mean_absolute_error: 1.2033
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8875 - mean_absolute_error: 1.1763
Epoch 89/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9849 - mean_absolute_error: 1.2117
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9831 - mean_absolute_error: 1.2087
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0531 - mean_absolute_error: 1.2452
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8793 - mean_absolute_error: 1.1733
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0948 - mean_absolute_error: 1.2467
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0263 - mean_absolute_error: 1.2178
Epoch 95/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9319 - mean_absolute_error: 1.1918
Epoch 96/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0872 - mean_absolute_error: 1.2329
Epoch 97/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0948 - mean_absolute_error: 1.2488
Epoch 98/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9429 - mean_absolute_error: 1.1938
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0166 - mean_absolute_error: 1.2275
Epoch 100/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9703 - mean_absolute_error: 1.1996
Out[144]:
<tensorflow.python.keras.callbacks.History at 0x1a51cb5610>
In [145]:
model10.evaluate(X_sd_pca, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 2.0075 - mean_absolute_error: 1.2159
Out[145]:
[2.0075135231018066, 1.2158657312393188]

2. Adding Four Hidden Layers to Model

In [146]:
# Initialize Sequential model
model11 = Sequential()

# Input Layer
model11.add(Dense (1, input_dim = 2, kernel_initializer = 'normal', activation = 'relu'))

# Adding four Hidden layers
model11.add(Dense(15, activation='sigmoid', kernel_initializer = 'normal'))    # 2nd layer
model11.add(Dense(10, activation='sigmoid', kernel_initializer = 'normal'))    # 3rd layer

model11.add(Dense(10, activation='sigmoid', kernel_initializer = 'normal'))     # 4th layer
model11.add(Dense(20, activation='sigmoid', kernel_initializer = 'normal'))     # 5th layer


#Output layer
model11.add(Dense(1, activation='softmax', kernel_initializer = 'normal'))

rms5 = optimizers.RMSprop(lr = 0.01)
model11.compile(optimizer = rms5, loss = 'mean_squared_error', metrics = ['mean_absolute_error'])
In [147]:
model11.summary()
Model: "sequential_17"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_66 (Dense)             (None, 1)                 3         
_________________________________________________________________
dense_67 (Dense)             (None, 15)                30        
_________________________________________________________________
dense_68 (Dense)             (None, 10)                160       
_________________________________________________________________
dense_69 (Dense)             (None, 10)                110       
_________________________________________________________________
dense_70 (Dense)             (None, 20)                220       
_________________________________________________________________
dense_71 (Dense)             (None, 1)                 21        
=================================================================
Total params: 544
Trainable params: 544
Non-trainable params: 0
_________________________________________________________________
In [148]:
model11.fit(X_sd_pca, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 1.9463 - mean_absolute_error: 1.1990
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9956 - mean_absolute_error: 1.2209
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0237 - mean_absolute_error: 1.2164
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0022 - mean_absolute_error: 1.2147
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9710 - mean_absolute_error: 1.2074
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9354 - mean_absolute_error: 1.1925
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9660 - mean_absolute_error: 1.2035
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9123 - mean_absolute_error: 1.1906
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0358 - mean_absolute_error: 1.2298
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0774 - mean_absolute_error: 1.2310
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9734 - mean_absolute_error: 1.2095
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9525 - mean_absolute_error: 1.1954
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1132 - mean_absolute_error: 1.2481
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0250 - mean_absolute_error: 1.2220
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0219 - mean_absolute_error: 1.2156
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0538 - mean_absolute_error: 1.2315
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0415 - mean_absolute_error: 1.2354
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9851 - mean_absolute_error: 1.2143
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1040 - mean_absolute_error: 1.2418
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0774 - mean_absolute_error: 1.2369
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9939 - mean_absolute_error: 1.2127
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9345 - mean_absolute_error: 1.1779
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9912 - mean_absolute_error: 1.2160
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9516 - mean_absolute_error: 1.2013
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0987 - mean_absolute_error: 1.2438
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9825 - mean_absolute_error: 1.2071
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9812 - mean_absolute_error: 1.2069
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0280 - mean_absolute_error: 1.2213
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0328 - mean_absolute_error: 1.2246
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1009 - mean_absolute_error: 1.2519
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0492 - mean_absolute_error: 1.2386
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9091 - mean_absolute_error: 1.1805
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0367 - mean_absolute_error: 1.2222
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9812 - mean_absolute_error: 1.2076
Epoch 35/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0737 - mean_absolute_error: 1.2295
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0511 - mean_absolute_error: 1.2281
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9932 - mean_absolute_error: 1.2115
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0168 - mean_absolute_error: 1.2097
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0672 - mean_absolute_error: 1.2276
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8665 - mean_absolute_error: 1.1677
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0820 - mean_absolute_error: 1.2336
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0554 - mean_absolute_error: 1.2380
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9498 - mean_absolute_error: 1.1937
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0398 - mean_absolute_error: 1.2180
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9965 - mean_absolute_error: 1.2067
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9852 - mean_absolute_error: 1.2094
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0198 - mean_absolute_error: 1.2117
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0172 - mean_absolute_error: 1.2142
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0152 - mean_absolute_error: 1.2194
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0904 - mean_absolute_error: 1.2382
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9639 - mean_absolute_error: 1.2086
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0872 - mean_absolute_error: 1.2446
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9858 - mean_absolute_error: 1.2084
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0568 - mean_absolute_error: 1.2277
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9883 - mean_absolute_error: 1.2157
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0734 - mean_absolute_error: 1.2463
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9563 - mean_absolute_error: 1.1931
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0286 - mean_absolute_error: 1.2190
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0394 - mean_absolute_error: 1.2268
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8580 - mean_absolute_error: 1.1589
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9750 - mean_absolute_error: 1.2135
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0317 - mean_absolute_error: 1.2202
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0079 - mean_absolute_error: 1.2191
Epoch 64/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9753 - mean_absolute_error: 1.2047
Epoch 65/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1188 - mean_absolute_error: 1.2574
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1008 - mean_absolute_error: 1.2455
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0390 - mean_absolute_error: 1.2327
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0426 - mean_absolute_error: 1.2253
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9862 - mean_absolute_error: 1.2149
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0367 - mean_absolute_error: 1.2265
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0721 - mean_absolute_error: 1.2434
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9910 - mean_absolute_error: 1.2136
Epoch 73/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9638 - mean_absolute_error: 1.1997
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0179 - mean_absolute_error: 1.2208
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0463 - mean_absolute_error: 1.2254
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0506 - mean_absolute_error: 1.2377
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1439 - mean_absolute_error: 1.2571
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0560 - mean_absolute_error: 1.2332
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0448 - mean_absolute_error: 1.2291
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0235 - mean_absolute_error: 1.2198
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9587 - mean_absolute_error: 1.2060
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1342 - mean_absolute_error: 1.2562
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0438 - mean_absolute_error: 1.2302
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0622 - mean_absolute_error: 1.2297
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0910 - mean_absolute_error: 1.2429
Epoch 86/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0233 - mean_absolute_error: 1.2140
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0659 - mean_absolute_error: 1.2355
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8878 - mean_absolute_error: 1.1794
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9834 - mean_absolute_error: 1.2093
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9924 - mean_absolute_error: 1.2101
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1032 - mean_absolute_error: 1.2427
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9774 - mean_absolute_error: 1.2059
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0548 - mean_absolute_error: 1.2298
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9628 - mean_absolute_error: 1.2066
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9829 - mean_absolute_error: 1.2013
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0106 - mean_absolute_error: 1.2214
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0966 - mean_absolute_error: 1.2481
Epoch 98/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1066 - mean_absolute_error: 1.2419
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0027 - mean_absolute_error: 1.2222
Epoch 100/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0949 - mean_absolute_error: 1.2515
Out[148]:
<tensorflow.python.keras.callbacks.History at 0x1a4bacb550>
In [149]:
model11.evaluate(X_sd_pca, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 2.0075 - mean_absolute_error: 1.2159
Out[149]:
[2.0075135231018066, 1.2158657312393188]

----------------------------------XXXXXXXXXXXXXXXXXXXXXXXXXXXXX-------------------------------------

PART TWO // Classification Model Building

1. Import Data.

In [1172]:
df = pd.read_csv('Part- 1,2&3 - Signal.csv')
In [1173]:
df.head()
Out[1173]:
Parameter 1 Parameter 2 Parameter 3 Parameter 4 Parameter 5 Parameter 6 Parameter 7 Parameter 8 Parameter 9 Parameter 10 Parameter 11 Signal_Strength
0 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 5
1 7.8 0.88 0.00 2.6 0.098 25.0 67.0 0.9968 3.20 0.68 9.8 5
2 7.8 0.76 0.04 2.3 0.092 15.0 54.0 0.9970 3.26 0.65 9.8 5
3 11.2 0.28 0.56 1.9 0.075 17.0 60.0 0.9980 3.16 0.58 9.8 6
4 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 5
In [1174]:
df.tail()
Out[1174]:
Parameter 1 Parameter 2 Parameter 3 Parameter 4 Parameter 5 Parameter 6 Parameter 7 Parameter 8 Parameter 9 Parameter 10 Parameter 11 Signal_Strength
1594 6.2 0.600 0.08 2.0 0.090 32.0 44.0 0.99490 3.45 0.58 10.5 5
1595 5.9 0.550 0.10 2.2 0.062 39.0 51.0 0.99512 3.52 0.76 11.2 6
1596 6.3 0.510 0.13 2.3 0.076 29.0 40.0 0.99574 3.42 0.75 11.0 6
1597 5.9 0.645 0.12 2.0 0.075 32.0 44.0 0.99547 3.57 0.71 10.2 5
1598 6.0 0.310 0.47 3.6 0.067 18.0 42.0 0.99549 3.39 0.66 11.0 6
In [1175]:
df.shape
Out[1175]:
(1599, 12)
In [1176]:
df.size
Out[1176]:
19188
In [1177]:
df.isnull().sum()
Out[1177]:
Parameter 1        0
Parameter 2        0
Parameter 3        0
Parameter 4        0
Parameter 5        0
Parameter 6        0
Parameter 7        0
Parameter 8        0
Parameter 9        0
Parameter 10       0
Parameter 11       0
Signal_Strength    0
dtype: int64
In [1178]:
df.dtypes
Out[1178]:
Parameter 1        float64
Parameter 2        float64
Parameter 3        float64
Parameter 4        float64
Parameter 5        float64
Parameter 6        float64
Parameter 7        float64
Parameter 8        float64
Parameter 9        float64
Parameter 10       float64
Parameter 11       float64
Signal_Strength      int64
dtype: object

1.The dataset consists of (1599 entries & 12 columns).

2.On checking for lapses in the dataset we can conclude by saying that the data does not have any null values & does not have any major cleaning that needs to be done.

In [ ]:
 
In [1179]:
df.info()
<class 'pandas.core.frame.DataFrame'>
RangeIndex: 1599 entries, 0 to 1598
Data columns (total 12 columns):
 #   Column           Non-Null Count  Dtype  
---  ------           --------------  -----  
 0   Parameter 1      1599 non-null   float64
 1   Parameter 2      1599 non-null   float64
 2   Parameter 3      1599 non-null   float64
 3   Parameter 4      1599 non-null   float64
 4   Parameter 5      1599 non-null   float64
 5   Parameter 6      1599 non-null   float64
 6   Parameter 7      1599 non-null   float64
 7   Parameter 8      1599 non-null   float64
 8   Parameter 9      1599 non-null   float64
 9   Parameter 10     1599 non-null   float64
 10  Parameter 11     1599 non-null   float64
 11  Signal_Strength  1599 non-null   int64  
dtypes: float64(11), int64(1)
memory usage: 150.0 KB
In [1180]:
plt.figure(figsize=(14,9))
sns.boxplot(data = df, orient = 'h', palette = 'Set1', dodge = False);

Observation:

From the above boxplot we can see that there are outliers are present in mostly all columns. I will be finding the outliers

2. Data Analysis & Visualisation.

Analysing each attribute with the help of plots.

A. Parameter 1

In [1181]:
# Plotting a visual analysis of parameter 1

fig,(ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 1'], ax = ax1, color = 'red')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 1', fontsize = 15)

sns.boxplot(df['Parameter 1'], ax = ax2, color = 'red')
ax2.set_title('Box Plot', fontsize = 15)
ax2.set_xlabel('Parameter 1', fontsize = 15)
Out[1181]:
Text(0.5, 0, 'Parameter 1')
In [1182]:
# Checking outliers in parameter 1 

outlier_cols0 = []

Q1 = df['Parameter 1'].quantile(0.25)    # 1st Quartile
Q3 = df['Parameter 1'].quantile(0.75)    # 3rd Quartile

IQR = Q3 - Q1         # Interquartile range

LTV_para1 = Q1 - 1.5 * IQR               # Lower range bound
UTV_para1 = Q3 + 1.5 * IQR               # Upper range bound 


print('Interquartile range =', IQR)
print('Parameter 1 <', LTV_para1, 'and >', UTV_para1, 'are outliers')
print('Number of outliers in parameter 1 column below the lower whisker =', df[df['Parameter 1'] < (Q1 - (1.5*IQR))]['Parameter 1'].count())
print('Number of outliers in parameter 1 column above the upper whisker =', df[df['Parameter 1'] > (Q3 + (1.5*IQR))]['Parameter 1'].count())

outlier_cols0.append('Parameter 1')
upperLowerBound_Disct = {'Parameter 1' : UTV_para1}
Interquartile range = 2.0999999999999996
Parameter 1 < 3.95 and > 12.349999999999998 are outliers
Number of outliers in parameter 1 column below the lower whisker = 0
Number of outliers in parameter 1 column above the upper whisker = 49

Observation :

We can observe from the outlier analysis above that we have a total of 49 outliers in "Parameter 1" which is towards the upper whisker. We will treat them later on.

B. Parameter 2

In [1183]:
# Plotting a visual analysis of parameter 2

fig,(ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 2'], ax = ax1, color = 'b')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 2', fontsize = 15)

sns.boxplot(df['Parameter 2'], ax = ax2, color = 'b')
ax2.set_title('Boxplot', fontsize = 15)
ax2.set_xlabel('Parameter 2', fontsize = 15)
Out[1183]:
Text(0.5, 0, 'Parameter 2')
In [1184]:
# Checking outliers in parameter 2

outlier_cols1 = []

Q1 = df['Parameter 2'].quantile(0.25)    # 1st Quartile
Q3 = df['Parameter 2'].quantile(0.75)    # 3rd Quartile

IQR = Q3 - Q1         # Interquartile range

LTV_para2 = Q1 - 1.5 * IQR               # Lower range bound
UTV_para2 = Q3 + 1.5 * IQR               # Upper range bound


print('Interquartile range =', IQR)
print('Parameter 2 <', LTV_para2, 'and >', UTV_para2, 'are outliers')
print('Number of outliers in the parameter 2 column below the lower whisker =', df[df['Parameter 2'] < (Q1 - (1.5*IQR))]['Parameter 2'].count())
print('Number of outliers in the parameter 2 column above the upper whisker =', df[df['Parameter 2'] > (Q3 + (1.5*IQR))]['Parameter 2'].count())

outlier_cols1.append('Parameter 2')
upperLowerBound_Disct = {'Parameter 2' : UTV_para2}
Interquartile range = 0.25
Parameter 2 < 0.015000000000000013 and > 1.0150000000000001 are outliers
Number of outliers in the parameter 2 column below the lower whisker = 0
Number of outliers in the parameter 2 column above the upper whisker = 19

Observation :

We can observe from the outlier analysis above that we have a total of 19 outliers in "Parameter 2" which is towards the upper whisker. We will treat them later on.

C. Parameter 3

In [1185]:
# Plotting a visual analysis of parameter 3

fig,(ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 3'], ax = ax1, color = 'green')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 3', fontsize = 15)

sns.boxplot(df['Parameter 3'], ax = ax2, color = 'green')
ax2.set_title('Boxplot', fontsize = 15)
ax2.set_xlabel('Parameter 3', fontsize = 15)
Out[1185]:
Text(0.5, 0, 'Parameter 3')
In [1186]:
# Checking outliers in parameter 3

outlier_cols2 = []

Q1 = df['Parameter 3'].quantile(0.25)     # 1st Quartile
Q3 = df['Parameter 3'].quantile(0.75)     # 3rd Quartile

IQR = Q3 - Q1            # Interquartile range

LTV_para3 = Q1 - 1.5 * IQR                # Lower range bound
UTV_para3 = Q3 + 1.5 * IQR                # Upper range bound


print('Interquartile range = ', IQR)
print('Parameter 3 <', LTV_para3, 'and >', UTV_para3, 'are outliers')
print('Number of outliers in the parameter 3 column below the lower whisker =', df[df['Parameter 3'] < (Q1 - (1.5*IQR))]['Parameter 3'].count())
print('Number of outliers in the parameter 3 column above the upper whisker =', df[df['Parameter 3'] > (Q3 + (1.5*IQR))]['Parameter 3'].count())

outlier_cols2.append('Parameter 3')
upperLowerBound_Disct = {'Parameter 3' : UTV_para3}
Interquartile range =  0.32999999999999996
Parameter 3 < -0.4049999999999999 and > 0.9149999999999999 are outliers
Number of outliers in the parameter 3 column below the lower whisker = 0
Number of outliers in the parameter 3 column above the upper whisker = 1

Observation :

We can observe from the outlier analysis above that we have a total of 1 outlier in "Parameter 3" which is towards the upper whisker. We will treat them later on.

D. Parameter 4

In [1187]:
# Plotting a visual analysis of parameter 4

fig,(ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 4'], ax = ax1, color = 'purple')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 4', fontsize = 15)

sns.boxplot(df['Parameter 4'], ax = ax2, color = 'purple')
ax2.set_title('Boxplot', fontsize = 15)
ax2.set_xlabel('Parameter 4', fontsize = 15)
Out[1187]:
Text(0.5, 0, 'Parameter 4')
In [1188]:
# Checking outliers in parameter 4

outlier_cols3 = []

Q1 = df['Parameter 4'].quantile(0.25)     # 1st Quartile
Q3 = df['Parameter 4'].quantile(0.75)     # 3rd Quartile

IQR = Q3 - Q1            # Interquartile range

LTV_para4 = Q1 - 1.5 * IQR                # Lower range bound
UTV_para4 = Q3 + 1.5 * IQR                # Upper range bound


print('Interquartile range = ', IQR)
print('Parameter 4 <', LTV_para4, 'and >', UTV_para4, 'are outliers')
print('Number of outliers in the parameter 4 column below the lower whisker =', df[df['Parameter 4'] < (Q1 - (1.5*IQR))]['Parameter 4'].count())
print('Number of outliers in the parameter 4 column above the upper whisker =', df[df['Parameter 4'] > (Q3 + (1.5*IQR))]['Parameter 4'].count())

outlier_cols3.append('Parameter 4')
upperLowerBound_Disct = {'Parameter 4' : UTV_para4}
Interquartile range =  0.7000000000000002
Parameter 4 < 0.8499999999999996 and > 3.6500000000000004 are outliers
Number of outliers in the parameter 4 column below the lower whisker = 0
Number of outliers in the parameter 4 column above the upper whisker = 155

Observation :

We can observe from the outlier analysis above that we have a total of 155 outliers in "Parameter 4" which is towards the upper whisker. We will treat them later on.

E. Parameter 5

In [1189]:
# Plotting a visual analysis of parameter 5

fig,(ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 5'], ax = ax1, color = 'orange')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 5', fontsize = 15)

sns.boxplot(df['Parameter 5'], ax = ax2, color = 'orange')
ax2.set_title('Boxplot', fontsize = 15)
ax2.set_xlabel('Parameter 5', fontsize = 15)
Out[1189]:
Text(0.5, 0, 'Parameter 5')
In [1190]:
# Checking outliers in parameter 5

outlier_cols4 = []

Q1 = df['Parameter 5'].quantile(0.25)     # 1st Quartile
Q3 = df['Parameter 5'].quantile(0.75)     # 3rd Quartile

IQR = Q3 - Q1            # Interquartile range

LTV_para5 = Q1 - 1.5 * IQR                # Lower range bound
UTV_para5 = Q3 + 1.5 * IQR                # Upper range bound


print('Interquartile range = ', IQR)
print('Parameter 5 <', LTV_para5, 'and >', UTV_para5, 'are outliers')
print('Number of outliers in the parameter 5 column below the lower whisker =', df[df['Parameter 5'] < (Q1 - (1.5*IQR))]['Parameter 5'].count())
print('Number of outliers in the parameter 5 column above the upper whisker =', df[df['Parameter 5'] > (Q3 + (1.5*IQR))]['Parameter 5'].count())

outlier_cols4.append('Parameter 5')
upperLowerBound_Disct = {'Parameter 5' : UTV_para5}
Interquartile range =  0.01999999999999999
Parameter 5 < 0.04000000000000002 and > 0.11999999999999998 are outliers
Number of outliers in the parameter 5 column below the lower whisker = 9
Number of outliers in the parameter 5 column above the upper whisker = 103

Observation :

We can observe from the outlier analysis above that we have a total of 112 outlier in "Parameter 5", 9 towards the lower whisker and 103 towards the upper whisker. We will treat them later on.

F. Parameter 6

In [1191]:
# Plotting a visual analysis of parameter 6

fig, (ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 6'], ax = ax1, color = 'black')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 6', fontsize = 15)

sns.boxplot(df['Parameter 6'], ax = ax2, color = 'black')
ax2.set_title('Boxplot', fontsize = 15)
ax2.set_xlabel('Parameter 6', fontsize = 15)
Out[1191]:
Text(0.5, 0, 'Parameter 6')
In [1192]:
# Checking outliers in parameter 6

outlier_cols5 = []

Q1 = df['Parameter 6'].quantile(0.25)     # 1st quartile
Q3 = df['Parameter 6'].quantile(0.75)     # 3rd quartile

IQR = Q3 - Q1          #Interquartile range

LTV_para6 = Q1 - 1.5 * IQR                # Lower range bound
UTV_para6 = Q3 + 1.5 * IQR                # Upper range bound


print('Interquartile range =', IQR)
print('Parameter 6 <', LTV_para6, ' and >', UTV_para6, 'are outliers')
print('Number of outliers in the parameter 6 column below the lower whisker =', df[df['Parameter 6'] < (Q1 - (1.5*IQR))]['Parameter 6'].count())
print('Number of outliers in the parameter 6 column above the upper whisker =', df[df['Parameter 6'] > (Q3 + (1.5*IQR))]['Parameter 6'].count())

outlier_cols5.append('Parameter 6')
upperLowerBand_Disct = {'Parameter 6' : UTV_para6}
Interquartile range = 14.0
Parameter 6 < -14.0  and > 42.0 are outliers
Number of outliers in the parameter 6 column below the lower whisker = 0
Number of outliers in the parameter 6 column above the upper whisker = 30

Observation :

We can observe from the outlier analysis above that we have a total of 30 outliers in "Parameter 6" which is towards the upper whisker. We will treat them later on.

G. Parameter 7

In [1193]:
# Plotting a visual analysis of parameter 7

fig, (ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 7'], ax = ax1, color = 'brown')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 7', fontsize = 15)

sns.boxplot(df['Parameter 7'], ax = ax2, color = 'brown')
ax2.set_title('Boxplot', fontsize= 15)
ax2.set_xlabel('Parameter 7', fontsize = 15)
Out[1193]:
Text(0.5, 0, 'Parameter 7')
In [1194]:
# Checking outliers in parameter 7

outlier_cols6 = []

Q1 = df['Parameter 7'].quantile(0.25)      # 1st quartile
Q3 = df['Parameter 7'].quantile(0.75)      # 3rd quartile

IQR = Q3 - Q1            # Interquartile range

LTV_para7 = Q1 - 1.5 * IQR                 # Lower range bound
UTV_para7 = Q3 + 1.5 * IQR                 # Upper range bound


print('Interquartile range =', IQR)
print('Parameter 7 <', LTV_para7, 'and >', UTV_para7, 'are outliers')
print('Number of outliers in the parameter 7 column below the lower whisker =', df[df['Parameter 7'] < (Q1 - (1.5*IQR))]['Parameter 7'].count())
print('Number of outliers in the parameter 7 column above the upper whisker =', df[df['Parameter 7'] > (Q3 + (1.5*IQR))]['Parameter 7'].count())

outlier_cols6.append('Parameter 7')
upperLowerBound_Disct = {'Parameter 7' : UTV_para7}
Interquartile range = 40.0
Parameter 7 < -38.0 and > 122.0 are outliers
Number of outliers in the parameter 7 column below the lower whisker = 0
Number of outliers in the parameter 7 column above the upper whisker = 55

Observation :

We can observe from the outlier analysis above that we have a total of 55 outliers in "Parameter 6" which is towards the upper whisker. We will treat them later on.

H. Parameter 8

In [1195]:
# Plotting a visual analysis of parameter 8

fig,(ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 8'], ax = ax1, color = 'pink')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 8', fontsize = 15)

sns.boxplot(df['Parameter 8'], ax = ax2, color = 'pink')
ax2.set_title('Boxplot', fontsize = 15)
ax2.set_xlabel('Parameter 8', fontsize = 15)
Out[1195]:
Text(0.5, 0, 'Parameter 8')
In [1196]:
# Checking outliers in parameter 8

outlier_cols7 = []

Q1 = df['Parameter 8'].quantile(0.25)     # 1st quartile
Q3 = df['Parameter 8'].quantile(0.75)     # 3rd quartile

IQR = Q3 - Q1            # Interquartile range
 
LTV_para8 = Q1 - 1.5 * IQR                 # Lower range bound
UTV_para8 = Q3 + 1.5 * IQR                 # Upper range bound 


print('Interquartile range = ', IQR)
print('Parameter 8 <', LTV_para8, 'and >', UTV_para8, 'are outliers')
print('Number of outliers in the parameter 8 column below the lower whisker =', df[df['Parameter 8'] < (Q1 - (1.5*IQR))]['Parameter 8'].count())
print('Number of outliers in the parameter 8 column above the upper whisker =', df[df['Parameter 8'] > (Q3 + (1.5*IQR))]['Parameter 8'].count())

outlier_cols7.append('Parameter 8')
upperLowerBound_Disct = {'Parameter 8' : UTV_para8}
Interquartile range =  0.002234999999999876
Parameter 8 < 0.9922475000000002 and > 1.0011874999999997 are outliers
Number of outliers in the parameter 8 column below the lower whisker = 21
Number of outliers in the parameter 8 column above the upper whisker = 24

Observation :

We can observe from the outlier analysis above that we have a total of 45 outlier in "Parameter 8", 21 towards the lower whisker and 24 towards the upper whisker. We will treat them later on.

I. Parameter 9

In [1197]:
# Plotting a visual analysis of parameter 9

fig,(ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 9'], ax = ax1, color = 'grey')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 9', fontsize = 15)

sns.boxplot(df['Parameter 9'], ax = ax2, color = 'grey')
ax1.set_title('Boxplot', fontsize = 15)
ax1.set_xlabel('Parameter 9', fontsize = 15)
Out[1197]:
Text(0.5, 0, 'Parameter 9')
In [1198]:
# Checking outliers in parameter 9

outlier_cols8 = []

Q1 = df['Parameter 9'].quantile(0.25)      # 1st quantile
Q3 = df['Parameter 9'].quantile(0.75)      # 3rd quantile

IQR = Q3 - Q1            # Interquartile range

LTV_para9 = Q1 - 1.5 * IQR                  # Lower range bound
UTV_para9 = Q3 + 1.5 * IQR                  # Upper range bound


print('Interquartile range =', IQR)
print('Parameter 9 <', LTV_para9, 'and >', UTV_para9, 'are outliers')
print('Number of outliers in the parameter 9 column below the lower whisker =', df[df['Parameter 9'] < (Q1 - (1.5*IQR))]['Parameter 9'].count())
print('Number of outliers in the parameter 9 column above the upper whisker =', df[df['Parameter 9'] > (Q3 + (1.5*IQR))]['Parameter 9'].count())

outlier_cols8.append('Parameter 9')
upperLowerBound_Disct = {'Parameter 9' : UTV_para9}
Interquartile range = 0.18999999999999995
Parameter 9 < 2.925 and > 3.6849999999999996 are outliers
Number of outliers in the parameter 9 column below the lower whisker = 14
Number of outliers in the parameter 9 column above the upper whisker = 21

Observation :

We can observe from the outlier analysis above that we have a total of 35 outlier in "Parameter 9", 14 towards the lower whisker and 21 towards the upper whisker. We will treat them later on.

J. Parameter 10

In [1199]:
# Plotting a visual analysis of parameter 10

fig, (ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 10'], ax = ax1, color = 'gold')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 10', fontsize = 15)

sns.boxplot(df['Parameter 10'], ax = ax2, color = 'gold')
ax2.set_title('Boxplot', fontsize = 15)
ax2.set_xlabel('Parameter 10', fontsize = 15)
Out[1199]:
Text(0.5, 0, 'Parameter 10')
In [1200]:
# Checking outliers in parameter 10

outlier_cols9 = []

Q1 = df['Parameter 10'].quantile(0.25)     # 1st quartile
Q3 = df['Parameter 10'].quantile(0.75)     # 3rd quartile

IQR = Q3 - Q1           # Interquartile range

LTV_para10 = Q1 - 1.5 * IQR                # Lower range bound
UTV_para10 = Q3 + 1.5 * IQR                # Upper range bound


print('Interquartile range =', IQR)
print('Parameter 10 <', LTV_para10, 'and >', UTV_para10, 'are outliers')
print('Number of outliers in the parameter 10 column below the lower whisker =', df[df['Parameter 10'] < (Q1 - (1.5*IQR))]['Parameter 10'].count())
print('Number of outliers in the parameter 10 column below the upper whisker =', df[df['Parameter 10'] > (Q3 + (1.5*IQR))]['Parameter 10'].count())

outlier_cols9.append('Parameter 10')
upperLowerBound_Disct = {'Parameter 10' : UTV_para10}
Interquartile range = 0.17999999999999994
Parameter 10 < 0.28000000000000014 and > 0.9999999999999999 are outliers
Number of outliers in the parameter 10 column below the lower whisker = 0
Number of outliers in the parameter 10 column below the upper whisker = 59

Observation :

We can observe from the outlier analysis above that we have a total of 59 outliers in "Parameter 10" which is towards the upper whisker. We will treat them later on.

K. Parameter 11

In [1201]:
# Plotting a visual analysis of parameter 11

fig, (ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Parameter 11'], ax = ax1, color = 'white')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Parameter 11', fontsize = 15)

sns.boxplot(df['Parameter 11'], ax = ax2, color = 'white')
ax2.set_title('Boxplot', fontsize = 15)
ax2.set_xlabel('Parameter 11', fontsize = 15)
Out[1201]:
Text(0.5, 0, 'Parameter 11')
In [1202]:
# Checking outliers in parameter 11

outlier_cols10 = []

Q1 = df['Parameter 11'].quantile(0.25)     # 1st quartile
Q3 = df['Parameter 11'].quantile(0.75)     # 3rd quartile

IQR = Q3 - Q1           # Interquartile range

LTV_para11 = Q1 - 1.5 * IQR                # Lower range bound
UTV_para11 = Q3 + 1.5 * IQR                # Upper range bound


print('Interquartile range =', IQR)
print('Parameter 11 <', LTV_para11, 'and >', UTV_para11, 'are outliers')
print('Number of outliers in the parameter 11 column below the lower whisker =', df[df['Parameter 11'] < (Q1 - (1.5*IQR))]['Parameter 11'].count())
print('Number of outliers in the parameter 11 column below the upper whisker =', df[df['Parameter 11'] > (Q3 + (1.5*IQR))]['Parameter 11'].count())

outlier_cols10.append('Parameter 11')
upperLowerBound_Disct = {'Parameter 11' : UTV_para11}
Interquartile range = 1.5999999999999996
Parameter 11 < 7.1000000000000005 and > 13.5 are outliers
Number of outliers in the parameter 11 column below the lower whisker = 0
Number of outliers in the parameter 11 column below the upper whisker = 13

Observation :

We can observe from the outlier analysis above that we have a total of 13 outliers in "Parameter 11" which is towards the upper whisker. We will treat them later on.

L. Signal_Strength

In [1203]:
# Plotting a visual analysis of signal_strength

fig, (ax1, ax2) = plt.subplots(nrows = 1, ncols = 2, figsize = (13,7))
fig.set_size_inches(20,7)

sns.distplot(df['Signal_Strength'], ax = ax1, color = 'yellow')
ax1.tick_params(labelsize = 15)
ax1.set_title('Distribution Plot', fontsize = 15)
ax1.set_xlabel('Signal_Strength', fontsize = 15)

sns.boxplot(df['Signal_Strength'], ax = ax2, color = 'yellow')
ax2.set_title('Boxplot', fontsize = 15)
ax2.set_xlabel('Signal_Strength', fontsize = 15)
Out[1203]:
Text(0.5, 0, 'Signal_Strength')
In [1204]:
# Checking outliers in signal_strength

outlier_cols11 = []

Q1 = df['Signal_Strength'].quantile(0.25)     # 1st quartile
Q3 = df['Signal_Strength'].quantile(0.75)     # 3rd quartile

IQR = Q3 - Q1           # Interquartile range

LTV_para12 = Q1 - 1.5 * IQR                # Lower range bound
UTV_para12 = Q3 + 1.5 * IQR                # Upper range bound


print('Interquartile range =', IQR)
print('Signal_Strength <', LTV_para12, 'and >', UTV_para12, 'are outliers')
print('Number of outliers in the Signal_Strength column below the lower whisker =', df[df['Signal_Strength'] < (Q1 - (1.5*IQR))]['Signal_Strength'].count())
print('Number of outliers in the Signal_Strength column below the upper whisker =', df[df['Signal_Strength'] > (Q3 + (1.5*IQR))]['Signal_Strength'].count())

outlier_cols11.append('Signal_Strength')
upperLowerBound_Disct = {'Signal_Strength' : UTV_para12}
Interquartile range = 1.0
Signal_Strength < 3.5 and > 7.5 are outliers
Number of outliers in the Signal_Strength column below the lower whisker = 10
Number of outliers in the Signal_Strength column below the upper whisker = 18

Observation :

We can observe from the outlier analysis above that we have a total of 28 outlier in "Signal_Strength", 10 towards the lower whisker and 18 towards the upper whisker. We will treat them later on.

In [1205]:
# Pairplot visual analysis to check correlation amongst different fields

sns.pairplot(df, diag_kind = 'kde');
In [1206]:
df.corr()

plt.figure(figsize = (18,12))
sns.heatmap(df.corr(), annot = True, fmt = 'g');

Observation : From the above pair plot & heatmap we can infer the relationship amongst the attributes and target column as follows:

  1. We have some strong negative correlations between (Parameter 1 & Parameter 9) (-0.68), (Parameter 3 & Parameter 9) (-0.54), (Parameter 2 & Parameter 3) (-0.55), (Parameter 8 & Parameter 11) (-0.49),
  1. Signal_Strength have some positive linear relation with (Parameter 11 that means if the quality of Parameter 11 goes up Signal_Strength will be more).
  1. We also have positive correlations between (Parameter 1 & Parameter 3) (0.67), (Parameter 1 & Parameter 8) (0.66), (Parameter 7 & Parameter 6) (0.66).
  1. Also it is quite visible that there are multiple gaussians in Parameter 3.
  1. Rest of the relations between other individual attributes are mostly cloud shaped or symmetrical shaped.

Pick one strategy to address the presence outliers and missing values and perform necessary imputation

------------------------ Fixing Outliers ------------------------

  • As we have seen above outlier are present in the given dataset.
  • There are multiple ways to deal with outliers but I mostly prefer either to drop the outliers or repalce it with median/mean.
  • Here I am going to replace the outliers with median because if we drop them, there may be chance to loose some important information which we dont want to as it could hamper our overall anylysis.
In [1207]:
# Showing the columns where outliers exist

print('These are the columns which have outliers : \n\n', outlier_cols0, outlier_cols1, outlier_cols2, outlier_cols3, outlier_cols4, outlier_cols5, outlier_cols6, outlier_cols7, outlier_cols8, outlier_cols9, outlier_cols10, outlier_cols11)
These are the columns which have outliers : 

 ['Parameter 1'] ['Parameter 2'] ['Parameter 3'] ['Parameter 4'] ['Parameter 5'] ['Parameter 6'] ['Parameter 7'] ['Parameter 8'] ['Parameter 9'] ['Parameter 10'] ['Parameter 11'] ['Signal_Strength']
In [1212]:
df_new = df.copy()
In [1213]:
# Treating outliers present in respective columns

for col_names in df_new.columns[:11]:
    q1 = df_new[col_names].quantile(0.25)
    q3 = df_new[col_names].quantile(0.75)
    iqr = q3 - q1
    low = q1 - 1.5*iqr
    high = q3 + 1.5*iqr
    
    df_new.loc[(df_new[col_names] < low) | (df_new[col_names] > high), col_names] = df_new[col_names].median()
In [1214]:
plt.figure(figsize=(15,8))
sns.boxplot(data = df_new, orient='h', palette='Set1', dodge=False);

Observation :

Now we can see from the above visual analysis of boxplots that most of the outliers are replaced with their median. We can see that most of the outliers are removed, but because of their gaussians replacing it with median values, the attributes raised with new outliers which we can ignore.

In [1215]:
df_new[['Signal_Strength']] = df_new[['Signal_Strength']].astype('category')
In [1216]:
df_new.shape
Out[1216]:
(1599, 12)
In [1217]:
df_new.size
Out[1217]:
19188
In [1218]:
df_new.head()
Out[1218]:
Parameter 1 Parameter 2 Parameter 3 Parameter 4 Parameter 5 Parameter 6 Parameter 7 Parameter 8 Parameter 9 Parameter 10 Parameter 11 Signal_Strength
0 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 5
1 7.8 0.88 0.00 2.6 0.098 25.0 67.0 0.9968 3.20 0.68 9.8 5
2 7.8 0.76 0.04 2.3 0.092 15.0 54.0 0.9970 3.26 0.65 9.8 5
3 11.2 0.28 0.56 1.9 0.075 17.0 60.0 0.9980 3.16 0.58 9.8 6
4 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 5
In [1219]:
df_new.count(axis = 0)
Out[1219]:
Parameter 1        1599
Parameter 2        1599
Parameter 3        1599
Parameter 4        1599
Parameter 5        1599
Parameter 6        1599
Parameter 7        1599
Parameter 8        1599
Parameter 9        1599
Parameter 10       1599
Parameter 11       1599
Signal_Strength    1599
dtype: int64
In [1220]:
df_new.dtypes
Out[1220]:
Parameter 1         float64
Parameter 2         float64
Parameter 3         float64
Parameter 4         float64
Parameter 5         float64
Parameter 6         float64
Parameter 7         float64
Parameter 8         float64
Parameter 9         float64
Parameter 10        float64
Parameter 11        float64
Signal_Strength    category
dtype: object

Feature Engineering Techniques

Creating Composite Features, ( "Parameter 2,3,4,9,10" ratio by dividing the values of "Parameter 9 with 2,3,10" )

In [1221]:
df_new.insert(df_new.shape[-1]-1,'Parameter 2,3,9 & 10',df_new['Parameter 9']/(df_new['Parameter 2'] + df_new['Parameter 3'] + df_new['Parameter 10']))
In [1222]:
df_new.head()
Out[1222]:
Parameter 1 Parameter 2 Parameter 3 Parameter 4 Parameter 5 Parameter 6 Parameter 7 Parameter 8 Parameter 9 Parameter 10 Parameter 11 Parameter 2,3,9 & 10 Signal_Strength
0 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 2.785714 5
1 7.8 0.88 0.00 2.6 0.098 25.0 67.0 0.9968 3.20 0.68 9.8 2.051282 5
2 7.8 0.76 0.04 2.3 0.092 15.0 54.0 0.9970 3.26 0.65 9.8 2.248276 5
3 11.2 0.28 0.56 1.9 0.075 17.0 60.0 0.9980 3.16 0.58 9.8 2.225352 6
4 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 2.785714 5

Creating Composite Features, ( "Parameter 1 & 11" ratio by dividing the values of "Parameter 11 with 1" )

In [1223]:
df_new.insert(df_new.shape[-1]-1, 'Parameter 1 & 11', df_new['Parameter 11']/df_new['Parameter 1'])
In [1224]:
df_new.head()
Out[1224]:
Parameter 1 Parameter 2 Parameter 3 Parameter 4 Parameter 5 Parameter 6 Parameter 7 Parameter 8 Parameter 9 Parameter 10 Parameter 11 Parameter 2,3,9 & 10 Parameter 1 & 11 Signal_Strength
0 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 2.785714 1.27027 5
1 7.8 0.88 0.00 2.6 0.098 25.0 67.0 0.9968 3.20 0.68 9.8 2.051282 1.25641 5
2 7.8 0.76 0.04 2.3 0.092 15.0 54.0 0.9970 3.26 0.65 9.8 2.248276 1.25641 5
3 11.2 0.28 0.56 1.9 0.075 17.0 60.0 0.9980 3.16 0.58 9.8 2.225352 0.87500 6
4 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 2.785714 1.27027 5

Creating Composite Features, ( "Parameter 4,5, & 8" ratio by dividing the values of "Parameter 4 with 8 & 5" )

In [1225]:
df_new.insert(df_new.shape[-1]-1,'Parameter 4,5 & 8',df_new['Parameter 4']/(df_new['Parameter 8'] + df_new['Parameter 5']))
In [1226]:
df_new.head()
Out[1226]:
Parameter 1 Parameter 2 Parameter 3 Parameter 4 Parameter 5 Parameter 6 Parameter 7 Parameter 8 Parameter 9 Parameter 10 Parameter 11 Parameter 2,3,9 & 10 Parameter 1 & 11 Parameter 4,5 & 8 Signal_Strength
0 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 2.785714 1.27027 1.769417 5
1 7.8 0.88 0.00 2.6 0.098 25.0 67.0 0.9968 3.20 0.68 9.8 2.051282 1.25641 2.374863 5
2 7.8 0.76 0.04 2.3 0.092 15.0 54.0 0.9970 3.26 0.65 9.8 2.248276 1.25641 2.112029 5
3 11.2 0.28 0.56 1.9 0.075 17.0 60.0 0.9980 3.16 0.58 9.8 2.225352 0.87500 1.770736 6
4 7.4 0.70 0.00 1.9 0.076 11.0 34.0 0.9978 3.51 0.56 9.4 2.785714 1.27027 1.769417 5
In [1227]:
df_new.shape
Out[1227]:
(1599, 15)
In [1228]:
df_new.size
Out[1228]:
23985
In [1229]:
df_new.isnull().sum()
Out[1229]:
Parameter 1             0
Parameter 2             0
Parameter 3             0
Parameter 4             0
Parameter 5             0
Parameter 6             0
Parameter 7             0
Parameter 8             0
Parameter 9             0
Parameter 10            0
Parameter 11            0
Parameter 2,3,9 & 10    0
Parameter 1 & 11        0
Parameter 4,5 & 8       0
Signal_Strength         0
dtype: int64
In [1230]:
### Splitting X-independent attributes and Y-dependent attributes and keeping the test set seperate

x = df_new.drop(['Signal_Strength'], axis = 1)
y = df_new[['Signal_Strength']]
In [1231]:
x.apply(zscore)
Out[1231]:
Parameter 1 Parameter 2 Parameter 3 Parameter 4 Parameter 5 Parameter 6 Parameter 7 Parameter 8 Parameter 9 Parameter 10 Parameter 11 Parameter 2,3,9 & 10 Parameter 1 & 11 Parameter 4,5 & 8
0 -0.506257 1.084072 -1.395226 -0.660402 -0.193503 -0.455657 -0.329932 0.648369 1.448448 -0.638601 -0.976125 1.066110 -0.167079 -0.661503
1 -0.237876 2.171450 -1.395226 0.980050 1.341692 1.076386 0.903736 0.039563 -0.783248 0.372245 -0.582843 -0.829895 -0.215210 0.889886
2 -0.237876 1.446531 -1.188918 0.276999 0.923002 -0.017931 0.417746 0.161324 -0.351307 0.119534 -0.582843 -0.321337 -0.215210 0.216403
3 2.043366 -1.453143 1.493090 -0.660402 -0.263285 0.200933 0.642049 0.770130 -1.071209 -0.470127 -0.582843 -0.380517 -1.539705 -0.658123
4 -0.506257 1.084072 -1.395226 -0.660402 -0.193503 -0.455657 -0.329932 0.648369 1.448448 -0.638601 -0.976125 1.066110 -0.167079 -0.661503
... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
1594 -1.311401 0.479973 -0.982609 -0.426051 0.783439 1.842407 0.043907 -1.117168 1.016507 -0.470127 0.105402 0.943177 1.302808 -0.471705
1595 -1.512687 0.177924 -0.879455 0.042649 -1.170445 2.608428 0.305594 -0.983231 1.520439 1.046143 0.793647 0.319357 2.013852 0.137217
1596 -1.244306 -0.063716 -0.724724 0.276999 -0.193503 1.514112 -0.105629 -0.605771 0.800537 0.961905 0.597006 0.226362 1.485063 0.303559
1597 -1.512687 0.751818 -0.776301 -0.426051 -0.263285 1.842407 0.043907 -0.770149 1.880390 0.624957 -0.189560 0.122859 1.425271 -0.408029
1598 -1.445592 -1.271913 1.028897 3.323553 -0.821537 0.310364 -0.030861 -0.757973 0.584566 0.203771 0.597006 -0.047970 1.788229 3.486623

1599 rows × 14 columns

In [1232]:
pca_model = PCA(n_components = 14)
pca_model.fit(x)
plt.step(list(range(1,15)), np.cumsum(pca_model.explained_variance_ratio_), where = 'mid')
plt.ylabel('Cummulation of Variance Explained')
plt.xlabel('Eigen Values')
plt.show()
np.cumsum(pca_model.explained_variance_ratio_)
Out[1232]:
array([0.93112343, 0.99527034, 0.99817434, 0.99941463, 0.99978568,
       0.99992121, 0.99996455, 0.99998237, 0.99999336, 0.99999865,
       0.99999939, 0.99999999, 1.        , 1.        ])
In [1233]:
cluster = range(1,12)
mean_distortions = []
for val in cluster:
    kmeans = KMeans(n_clusters = val)
    kmeans.fit(df_new)
    mean_distortions.append(sum(np.min(cdist(df_new, kmeans.cluster_centers_), axis = 1))/df_new.shape[0])
In [1234]:
plt.plot(cluster, mean_distortions,'bx-')
plt.xlabel('No. Of Clusters')
plt.ylabel('Distortion')
plt.title('Elbow Method')
Out[1234]:
Text(0.5, 1.0, 'Elbow Method')
In [1235]:
X_train1, X_test1, y_train, y_test = train_test_split(x, y, test_size=0.33, random_state=42)
In [1236]:
from sklearn.preprocessing import StandardScaler
X_train_sd = StandardScaler().fit_transform(X_train1)
X_test_sd = StandardScaler().fit_transform(X_test1)
In [1237]:
from sklearn.preprocessing import LabelEncoder
from sklearn import preprocessing

label_encoder = preprocessing.LabelEncoder().fit(y_train)
y_train = label_encoder.transform(y_train)

label_encoder = preprocessing.LabelEncoder().fit(y_test)
y_test = label_encoder.transform(y_test)
/Applications/anaconda3/lib/python3.7/site-packages/sklearn/utils/validation.py:72: DataConversionWarning: A column-vector y was passed when a 1d array was expected. Please change the shape of y to (n_samples, ), for example using ravel().
  return f(**kwargs)
In [1238]:
y_train = to_categorical(y_train, num_classes=10)
y_test = to_categorical(y_test, num_classes=10)

print("Shape of y_train:", y_train.shape)
print("One hot encoded value of y_train:", y_train[0])
Shape of y_train: (1071, 10)
One hot encoded value of y_train: [0. 0. 0. 1. 0. 0. 0. 0. 0. 0.]
In [1239]:
# generating the covariance matrix and the eigen values for the PCA analysis
cov_matrix = np.cov(X_train_sd.T) # the relevanat covariance matrix
print('Covariance Matrix \n%s', cov_matrix)
Covariance Matrix 
%s [[ 1.00093458e+00 -2.32410251e-01  5.99138340e-01  1.81366779e-01
   2.00770241e-01 -1.38106617e-01 -1.17948996e-01  5.45473970e-01
  -5.96016295e-01  1.51494592e-01 -4.07013904e-02 -5.60151049e-01
  -8.30272450e-01  1.65313015e-01]
 [-2.32410251e-01  1.00093458e+00 -5.59814698e-01  1.07922524e-02
   1.59415350e-01 -8.04582955e-03  8.01717410e-02 -6.46099117e-03
   1.90831513e-01 -3.16557532e-01 -2.20440731e-01 -4.42435372e-02
   7.51556530e-02  6.40629135e-04]
 [ 5.99138340e-01 -5.59814698e-01  1.00093458e+00  1.63410088e-01
   1.04595515e-01 -6.17874488e-02  6.47931774e-03  3.53543851e-01
  -5.06249440e-01  2.58523442e-01  1.32810983e-01 -6.73142800e-01
  -4.22172072e-01  1.55926053e-01]
 [ 1.81366779e-01  1.07922524e-02  1.63410088e-01  1.00093458e+00
   1.92466826e-01  1.45692815e-02  9.29586456e-02  3.59678476e-01
  -5.43814621e-02  5.42385359e-02  1.15811577e-01 -1.99847489e-01
  -1.06104282e-01  9.98448173e-01]
 [ 2.00770241e-01  1.59415350e-01  1.04595515e-01  1.92466826e-01
   1.00093458e+00 -1.72335571e-02  9.76281709e-02  3.46890200e-01
  -2.09875934e-01 -1.05024895e-01 -2.46927419e-01 -2.05274060e-01
  -3.24443172e-01  1.24685907e-01]
 [-1.38106617e-01 -8.04582955e-03 -6.17874488e-02  1.45692815e-02
  -1.72335571e-02  1.00093458e+00  6.04988594e-01 -1.08082109e-02
   7.98670247e-02  9.95177803e-03 -8.69243249e-02  7.49591853e-02
   6.98448118e-02  1.49663849e-02]
 [-1.17948996e-01  8.01717410e-02  6.47931774e-03  9.29586456e-02
   9.76281709e-02  6.04988594e-01  1.00093458e+00  1.20614180e-01
  -9.86908423e-03 -5.43373209e-02 -2.43726232e-01 -3.64337955e-02
  -8.16826311e-03  8.58413357e-02]
 [ 5.45473970e-01 -6.46099117e-03  3.53543851e-01  3.59678476e-01
   3.46890200e-01 -1.08082109e-02  1.20614180e-01  1.00093458e+00
  -2.60423216e-01  9.55323926e-02 -4.28426800e-01 -4.05307560e-01
  -6.58210784e-01  3.31662704e-01]
 [-5.96016295e-01  1.90831513e-01 -5.06249440e-01 -5.43814621e-02
  -2.09875934e-01  7.98670247e-02 -9.86908423e-03 -2.60423216e-01
   1.00093458e+00 -4.53678069e-03  1.36286469e-01  5.24025642e-01
   5.65431884e-01 -3.89333917e-02]
 [ 1.51494592e-01 -3.16557532e-01  2.58523442e-01  5.42385359e-02
  -1.05024895e-01  9.95177803e-03 -5.43373209e-02  9.55323926e-02
  -4.53678069e-03  1.00093458e+00  2.34612742e-01 -4.98927827e-01
  -1.77912190e-03  6.16802307e-02]
 [-4.07013904e-02 -2.20440731e-01  1.32810983e-01  1.15811577e-01
  -2.46927419e-01 -8.69243249e-02 -2.43726232e-01 -4.28426800e-01
   1.36286469e-01  2.34612742e-01  1.00093458e+00 -2.30174036e-02
   5.37935798e-01  1.37429549e-01]
 [-5.60151049e-01 -4.42435372e-02 -6.73142800e-01 -1.99847489e-01
  -2.05274060e-01  7.49591853e-02 -3.64337955e-02 -4.05307560e-01
   5.24025642e-01 -4.98927827e-01 -2.30174036e-02  1.00093458e+00
   4.52607989e-01 -1.86357839e-01]
 [-8.30272450e-01  7.51556530e-02 -4.22172072e-01 -1.06104282e-01
  -3.24443172e-01  6.98448118e-02 -8.16826311e-03 -6.58210784e-01
   5.65431884e-01 -1.77912190e-03  5.37935798e-01  4.52607989e-01
   1.00093458e+00 -8.01744441e-02]
 [ 1.65313015e-01  6.40629135e-04  1.55926053e-01  9.98448173e-01
   1.24685907e-01  1.49663849e-02  8.58413357e-02  3.31662704e-01
  -3.89333917e-02  6.16802307e-02  1.37429549e-01 -1.86357839e-01
  -8.01744441e-02  1.00093458e+00]]
In [1240]:
#generating the eigen values and the eigen vectors
e_vals, e_vecs = np.linalg.eig(cov_matrix)
print('Eigenvectors \n%s' %e_vecs)
print('\nEigenvalues \n%s' %e_vals)
Eigenvectors 
[[ 4.21104370e-01 -8.22452598e-02  1.28079100e-01 -6.84139299e-02
   1.11667098e-01  1.67205110e-02 -1.75744193e-01  5.08077278e-01
   7.25870052e-03 -3.31146817e-01  1.58787734e-01 -5.96996726e-01
  -4.13488723e-02  2.27604449e-03]
 [-1.17233892e-01  3.99690771e-01 -1.75907921e-02 -2.61333660e-01
  -5.59458407e-01  2.48971968e-01 -3.65615882e-01  1.39045605e-01
   1.39823898e-01  4.20102868e-02  9.83917079e-03 -3.38070557e-02
   4.55358020e-01  1.77282701e-03]
 [ 3.64053797e-01 -2.87910085e-01  2.04928987e-02  1.88562334e-01
   1.46556144e-01  1.39645284e-01  1.09587649e-01 -2.07592397e-01
   4.89763651e-01  3.02904163e-01  2.09721105e-01 -4.09751547e-02
   5.24623087e-01  1.63852616e-03]
 [ 1.96505160e-01  1.01110662e-01 -6.12901010e-01 -1.51518591e-01
   1.12117176e-01 -3.35694601e-03 -4.78724443e-02 -7.46302574e-02
  -1.26401829e-01  3.19726093e-02  9.38757925e-02 -1.57397991e-02
   1.05775147e-02 -7.11295922e-01]
 [ 1.85989690e-01  2.96053196e-01  7.16183470e-03 -1.35520717e-01
  -1.58536594e-01  3.30838233e-01  8.32248451e-01  1.17176031e-01
  -1.05989846e-01 -2.55749548e-02  5.82050399e-02 -2.95317652e-02
  -1.42706500e-03  4.73051218e-02]
 [-4.62487028e-02  2.29066656e-01 -1.19030397e-01  6.39860437e-01
   5.40799654e-02  7.05060478e-02 -5.77490869e-02  4.74902779e-01
  -1.88541518e-01  4.98326260e-01 -2.98175828e-02 -1.62481845e-02
  -2.48017711e-03  5.90887255e-04]
 [ 1.56334691e-02  3.36194502e-01 -1.23076153e-01  5.85672888e-01
  -8.00949332e-03  1.35132954e-01 -4.01967656e-02 -2.57954320e-01
   2.06835438e-01 -6.30252401e-01  3.63769275e-02  2.76338418e-02
  -1.03716785e-03  3.59558031e-04]
 [ 3.57765490e-01  2.46653088e-01 -3.84703220e-02 -4.97274397e-02
  -2.25713684e-02 -4.13459744e-01  6.80580133e-02  9.10502130e-02
   3.47463126e-01  4.73317263e-02 -7.04907195e-01  4.94693148e-02
  -2.14285057e-02  7.83583505e-03]
 [-3.35862467e-01  4.59758821e-02 -2.02458627e-01 -3.08808139e-02
  -1.31208013e-01 -4.70466652e-01  2.09962392e-01  2.85359204e-01
   5.07190290e-01 -9.67462312e-03  4.49785169e-01  1.00623381e-03
  -1.39379337e-01 -1.05218515e-03]
 [ 1.26035267e-01 -3.55427307e-01 -1.05748588e-01  2.44183019e-01
  -5.33095488e-01 -4.28429531e-01  1.54758553e-01 -2.05160847e-02
  -4.02032548e-01 -1.59933608e-01 -1.20937347e-02 -2.95387824e-02
   3.26550509e-01  1.29849855e-03]
 [-1.02116104e-01 -4.87349977e-01 -2.79940170e-01 -6.28621939e-03
  -3.89723715e-02  3.84697880e-01  2.22269281e-02  4.50026871e-01
   1.85213315e-01 -2.54158882e-01 -2.69716474e-01  3.86486248e-01
   1.03658616e-02  6.19552503e-05]
 [-3.70274067e-01  1.38209595e-01  5.51273557e-03 -9.49427003e-02
   5.30311137e-01 -1.80804540e-01  1.49243130e-01  1.59127928e-01
  -1.64653117e-01 -2.09233743e-01 -1.26307781e-01 -4.49981867e-02
   6.19961271e-01  3.93007166e-03]
 [-4.03560494e-01 -1.86875189e-01 -2.44597053e-01  6.85318480e-02
  -1.11513845e-01  1.68935504e-01  1.30392231e-01 -2.06512162e-01
   1.35924207e-01  1.09988229e-01 -3.43641982e-01 -6.95567471e-01
  -6.77170569e-02  2.44760011e-03]
 [ 1.83646618e-01  8.00457643e-02 -6.21268542e-01 -1.44598075e-01
   1.23552601e-01 -2.17650107e-02 -1.05420916e-01 -8.61518212e-02
  -1.24480068e-01  3.48849063e-02  1.00778421e-01 -9.70576076e-03
   4.77184953e-03  7.01229789e-01]]

Eigenvalues 
[4.12202279e+00 2.15358695e+00 2.01552718e+00 1.54934624e+00
 1.02318497e+00 8.61571440e-01 7.33545638e-01 4.97825513e-01
 4.45780873e-01 3.14693127e-01 2.52704108e-01 2.80666963e-02
 1.51377450e-02 9.08350207e-05]
In [1241]:
# the "cumulative variance explained" analysis 
tot = sum(e_vals)
var_exp = [( i /tot ) * 100 for i in sorted(e_vals, reverse=True)]
cum_var_exp = np.cumsum(var_exp)
print("Cumulative Variance Explained", cum_var_exp)
Cumulative Variance Explained [ 29.41552882  44.78392973  59.16711025  70.22353598  77.52517611
  83.67351172  88.90823116  92.46080752  95.64198355  97.8876928
  99.69103678  99.89132599  99.99935178 100.        ]
In [1242]:
# Plotting the variance expalained by the principal components and the cumulative variance explained.
plt.figure(figsize=(10 , 5))
plt.bar(range(1, e_vals.size + 1), var_exp, alpha = 0.5, align = 'center', label = 'Individual explained variance')
plt.step(range(1, e_vals.size + 1), cum_var_exp, where='mid', label = 'Cumulative explained variance')
plt.ylabel('Explained Variance Ratio')
plt.xlabel('Principal Components')
plt.legend(loc = 'best')
plt.tight_layout()
plt.show()
In [1243]:
eigen_pairs = [(np.abs(e_vals[i]), e_vecs[:,i]) for i in range(len(e_vals))]
eigen_pairs.sort(reverse=True)
eigen_pairs[:14]
Out[1243]:
[(4.122022794950574,
  array([ 0.42110437, -0.11723389,  0.3640538 ,  0.19650516,  0.18598969,
         -0.0462487 ,  0.01563347,  0.35776549, -0.33586247,  0.12603527,
         -0.1021161 , -0.37027407, -0.40356049,  0.18364662])),
 (2.1535869471343845,
  array([-0.08224526,  0.39969077, -0.28791009,  0.10111066,  0.2960532 ,
          0.22906666,  0.3361945 ,  0.24665309,  0.04597588, -0.35542731,
         -0.48734998,  0.1382096 , -0.18687519,  0.08004576])),
 (2.01552718417769,
  array([ 0.1280791 , -0.01759079,  0.0204929 , -0.61290101,  0.00716183,
         -0.1190304 , -0.12307615, -0.03847032, -0.20245863, -0.10574859,
         -0.27994017,  0.00551274, -0.24459705, -0.62126854])),
 (1.5493462377287601,
  array([-0.06841393, -0.26133366,  0.18856233, -0.15151859, -0.13552072,
          0.63986044,  0.58567289, -0.04972744, -0.03088081,  0.24418302,
         -0.00628622, -0.0949427 ,  0.06853185, -0.14459807])),
 (1.0231849727991553,
  array([ 0.1116671 , -0.55945841,  0.14655614,  0.11211718, -0.15853659,
          0.05407997, -0.00800949, -0.02257137, -0.13120801, -0.53309549,
         -0.03897237,  0.53031114, -0.11151385,  0.1235526 ])),
 (0.8615714400645825,
  array([ 0.01672051,  0.24897197,  0.13964528, -0.00335695,  0.33083823,
          0.07050605,  0.13513295, -0.41345974, -0.47046665, -0.42842953,
          0.38469788, -0.18080454,  0.1689355 , -0.02176501])),
 (0.7335456384355687,
  array([-0.17574419, -0.36561588,  0.10958765, -0.04787244,  0.83224845,
         -0.05774909, -0.04019677,  0.06805801,  0.20996239,  0.15475855,
          0.02222693,  0.14924313,  0.13039223, -0.10542092])),
 (0.4978255131447907,
  array([ 0.50807728,  0.13904561, -0.2075924 , -0.07463026,  0.11717603,
          0.47490278, -0.25795432,  0.09105021,  0.2853592 , -0.02051608,
          0.45002687,  0.15912793, -0.20651216, -0.08615182])),
 (0.4457808726391915,
  array([ 0.0072587 ,  0.1398239 ,  0.48976365, -0.12640183, -0.10598985,
         -0.18854152,  0.20683544,  0.34746313,  0.50719029, -0.40203255,
          0.18521332, -0.16465312,  0.13592421, -0.12448007])),
 (0.31469312652984427,
  array([-0.33114682,  0.04201029,  0.30290416,  0.03197261, -0.02557495,
          0.49832626, -0.6302524 ,  0.04733173, -0.00967462, -0.15993361,
         -0.25415888, -0.20923374,  0.10998823,  0.03488491])),
 (0.2527041082256977,
  array([ 0.15878773,  0.00983917,  0.20972111,  0.09387579,  0.05820504,
         -0.02981758,  0.03637693, -0.7049072 ,  0.44978517, -0.01209373,
         -0.26971647, -0.12630778, -0.34364198,  0.10077842])),
 (0.02806669633173831,
  array([-0.59699673, -0.03380706, -0.04097515, -0.0157398 , -0.02953177,
         -0.01624818,  0.02763384,  0.04946931,  0.00100623, -0.02953878,
          0.38648625, -0.04499819, -0.69556747, -0.00970576])),
 (0.015137744966902327,
  array([-0.04134887,  0.45535802,  0.52462309,  0.01057751, -0.00142707,
         -0.00248018, -0.00103717, -0.02142851, -0.13937934,  0.32655051,
          0.01036586,  0.61996127, -0.06771706,  0.00477185])),
 (9.083502065304852e-05,
  array([ 2.27604449e-03,  1.77282701e-03,  1.63852616e-03, -7.11295922e-01,
          4.73051218e-02,  5.90887255e-04,  3.59558031e-04,  7.83583505e-03,
         -1.05218515e-03,  1.29849855e-03,  6.19552503e-05,  3.93007166e-03,
          2.44760011e-03,  7.01229789e-01]))]
In [1244]:
# generating dimensionally reduced datasets
w = np.hstack((eigen_pairs[0][1].reshape(14,1), 
                      eigen_pairs[1][1].reshape(14,1)))
print('Matrix W:\n', w)
X_sd_pca = X_train_sd.dot(w)
X_test_sd_pca = X_test_sd.dot(w)
Matrix W:
 [[ 0.42110437 -0.08224526]
 [-0.11723389  0.39969077]
 [ 0.3640538  -0.28791009]
 [ 0.19650516  0.10111066]
 [ 0.18598969  0.2960532 ]
 [-0.0462487   0.22906666]
 [ 0.01563347  0.3361945 ]
 [ 0.35776549  0.24665309]
 [-0.33586247  0.04597588]
 [ 0.12603527 -0.35542731]
 [-0.1021161  -0.48734998]
 [-0.37027407  0.1382096 ]
 [-0.40356049 -0.18687519]
 [ 0.18364662  0.08004576]]
In [1245]:
X_train_sd.shape, w.shape, X_sd_pca.shape, X_test_sd_pca.shape
Out[1245]:
((1071, 14), (14, 2), (1071, 2), (528, 2))
In [1246]:
X_sd_pca, X_train_sd
Out[1246]:
(array([[ 2.37577805, -0.11579088],
        [-3.07041545, -0.53924261],
        [-1.65303526,  2.31916528],
        ...,
        [-1.08962381,  2.34631815],
        [-1.24284746, -3.83430216],
        [-3.85828245, -3.25440715]]),
 array([[-1.67185350e-01, -1.06169860e+00,  1.13839302e+00, ...,
         -1.05859058e+00, -3.01152034e-03,  9.95481094e-01],
        [-9.87286922e-01,  1.35318727e+00, -1.33018202e+00, ...,
          5.74394620e-01,  2.06699217e+00,  5.44003432e-01],
        [-1.05562872e+00,  6.28721512e-01, -1.38161067e+00, ...,
          1.53948068e+00,  4.25416116e-01, -8.93267766e-03],
        ...,
        [-6.45577934e-01,  5.68349365e-01, -1.07303879e+00, ...,
          1.31642642e+00,  7.40397212e-03,  1.25794613e+00],
        [-1.67185350e-01, -1.96728080e+00,  4.18391970e-01, ...,
          2.31736429e-01,  6.58678589e-01, -1.03844031e+00],
        [-1.60236310e+00, -1.42393148e+00, -4.44658515e-02, ...,
          1.92146479e+00,  3.52067845e+00, -1.08244993e+00]]))
In [1247]:
print(X_train_sd.shape)
print(y_train.shape)
print(X_sd_pca.shape)
print(y_test.shape)
(1071, 14)
(1071, 10)
(1071, 2)
(528, 10)
Now use x_train_sd, y_train for NN before PCA
Use x_sd_pca, y_train for NN after PCA

I ] Neural Network Models before PCA

A. -------------------SGD Optimizer--------------------

In [1266]:
# SGD Neural Network regression model before pca

class_model = Sequential()

# Input Layer

class_model.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Output Layer

class_model.add(Dense(10, kernel_initializer = 'normal', activation = 'softmax'))

sgd6 = optimizers.SGD(lr = 0.01)
class_model.compile(optimizer = sgd6, loss = 'categorical_crossentropy', metrics = ['accuracy'])
In [1267]:
class_model.summary()
Model: "sequential_73"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_181 (Dense)            (None, 9)                 135       
_________________________________________________________________
dense_182 (Dense)            (None, 10)                100       
=================================================================
Total params: 235
Trainable params: 235
Non-trainable params: 0
_________________________________________________________________
In [1268]:
his = class_model.fit(X_train_sd, y_train, epochs = 100, verbose = 1)
Epoch 1/100
34/34 [==============================] - 0s 1ms/step - loss: 2.2842 - accuracy: 0.2593
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1923 - accuracy: 0.4208
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1093 - accuracy: 0.4712
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0315 - accuracy: 0.4750
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9630 - accuracy: 0.4950
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8901 - accuracy: 0.5052
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8305 - accuracy: 0.4977
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 1.7807 - accuracy: 0.4935
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 1.7241 - accuracy: 0.4893
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 1.6897 - accuracy: 0.5013
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 1.6315 - accuracy: 0.5330
Epoch 12/100
34/34 [==============================] - 0s 1ms/step - loss: 1.5848 - accuracy: 0.5168
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 1.5320 - accuracy: 0.5474
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 1.5036 - accuracy: 0.5382
Epoch 15/100
34/34 [==============================] - 0s 1ms/step - loss: 1.4751 - accuracy: 0.5412
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 1.4434 - accuracy: 0.5520
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3941 - accuracy: 0.5599
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3641 - accuracy: 0.5664
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3356 - accuracy: 0.5809
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2913 - accuracy: 0.5626
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2604 - accuracy: 0.5721
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2377 - accuracy: 0.5755
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2192 - accuracy: 0.5836
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1993 - accuracy: 0.5605
Epoch 25/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2009 - accuracy: 0.5693
Epoch 26/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1609 - accuracy: 0.5763
Epoch 27/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1803 - accuracy: 0.5374
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0984 - accuracy: 0.5891
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0731 - accuracy: 0.6192
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1090 - accuracy: 0.5738
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0955 - accuracy: 0.5798
Epoch 32/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0587 - accuracy: 0.5867
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0676 - accuracy: 0.5887
Epoch 34/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0578 - accuracy: 0.5799
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0590 - accuracy: 0.5759
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0057 - accuracy: 0.5973
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0331 - accuracy: 0.5841
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0471 - accuracy: 0.5879
Epoch 39/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0208 - accuracy: 0.5967
Epoch 40/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0561 - accuracy: 0.5680
Epoch 41/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9861 - accuracy: 0.6034
Epoch 42/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0186 - accuracy: 0.5907
Epoch 43/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0280 - accuracy: 0.5974
Epoch 44/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0166 - accuracy: 0.5795
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0261 - accuracy: 0.5709
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0117 - accuracy: 0.6014
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9844 - accuracy: 0.6041
Epoch 48/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0187 - accuracy: 0.5930
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0418 - accuracy: 0.5944
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0052 - accuracy: 0.6123
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9949 - accuracy: 0.6069
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0062 - accuracy: 0.5865
Epoch 53/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9976 - accuracy: 0.5985
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0138 - accuracy: 0.5959
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9720 - accuracy: 0.6076
Epoch 56/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0057 - accuracy: 0.6185
Epoch 57/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9853 - accuracy: 0.6155
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9912 - accuracy: 0.6193
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0081 - accuracy: 0.6080
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9846 - accuracy: 0.6143
Epoch 61/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0095 - accuracy: 0.6340
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9970 - accuracy: 0.6131
Epoch 63/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9886 - accuracy: 0.6106
Epoch 64/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0209 - accuracy: 0.5993
Epoch 65/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9894 - accuracy: 0.6070
Epoch 66/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9672 - accuracy: 0.6311
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0189 - accuracy: 0.5891
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0197 - accuracy: 0.5840
Epoch 69/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9612 - accuracy: 0.6081
Epoch 70/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9650 - accuracy: 0.6194
Epoch 71/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9441 - accuracy: 0.6187
Epoch 72/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9894 - accuracy: 0.6025
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9755 - accuracy: 0.6284
Epoch 74/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9699 - accuracy: 0.6351
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0315 - accuracy: 0.5644
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0178 - accuracy: 0.6073
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9428 - accuracy: 0.6258
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9939 - accuracy: 0.5894
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0297 - accuracy: 0.5935
Epoch 80/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0109 - accuracy: 0.5947
Epoch 81/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0028 - accuracy: 0.6182
Epoch 82/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9783 - accuracy: 0.6258
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9572 - accuracy: 0.6241
Epoch 84/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9790 - accuracy: 0.6046
Epoch 85/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9359 - accuracy: 0.6351
Epoch 86/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9607 - accuracy: 0.6134
Epoch 87/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9627 - accuracy: 0.5982
Epoch 88/100
34/34 [==============================] - ETA: 0s - loss: 1.1126 - accuracy: 0.62 - 0s 1ms/step - loss: 0.9993 - accuracy: 0.6065
Epoch 89/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9931 - accuracy: 0.6051
Epoch 90/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9256 - accuracy: 0.6361
Epoch 91/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9504 - accuracy: 0.6179
Epoch 92/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9504 - accuracy: 0.6169
Epoch 93/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9668 - accuracy: 0.6069
Epoch 94/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9393 - accuracy: 0.6203
Epoch 95/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9584 - accuracy: 0.6169
Epoch 96/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9818 - accuracy: 0.6281
Epoch 97/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9427 - accuracy: 0.6086
Epoch 98/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9122 - accuracy: 0.6495
Epoch 99/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9695 - accuracy: 0.5949
Epoch 100/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9514 - accuracy: 0.6232
In [1269]:
class_model.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 0.9575 - accuracy: 0.6162
Out[1269]:
[0.9575030207633972, 0.6162465214729309]

1. Adding Two Hidden Layers to Model

In [1270]:
# Initialize Sequential model
model12 = Sequential()

# Input Layer
model12.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Adding two Hidden layers
model12.add(Dense(6, activation='tanh', kernel_initializer = 'normal'))    # 2nd layer
model12.add(Dense(5, activation='tanh', kernel_initializer = 'normal'))    # 3rd layer

#Output layer
model12.add(Dense(10, activation='softmax', kernel_initializer = 'normal'))

sgd7 = optimizers.SGD(lr = 0.01)
model12.compile(optimizer = sgd7, loss = 'categorical_crossentropy', metrics = ['accuracy'])
In [1271]:
model12.summary()
Model: "sequential_74"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_183 (Dense)            (None, 9)                 135       
_________________________________________________________________
dense_184 (Dense)            (None, 6)                 60        
_________________________________________________________________
dense_185 (Dense)            (None, 5)                 35        
_________________________________________________________________
dense_186 (Dense)            (None, 10)                60        
=================================================================
Total params: 290
Trainable params: 290
Non-trainable params: 0
_________________________________________________________________
In [1272]:
model12.fit(X_train_sd, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 0s 1ms/step - loss: 2.2811 - accuracy: 0.3733 
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1973 - accuracy: 0.4170
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1175 - accuracy: 0.4249
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0405 - accuracy: 0.4380
Epoch 5/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9807 - accuracy: 0.4158
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9150 - accuracy: 0.4428
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8512 - accuracy: 0.4498
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8122 - accuracy: 0.4195
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 1.7513 - accuracy: 0.4455
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 1.7065 - accuracy: 0.4525
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 1.6668 - accuracy: 0.4446
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 1.6418 - accuracy: 0.4324
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 1.5842 - accuracy: 0.4418
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 1.5857 - accuracy: 0.4136
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 1.5276 - accuracy: 0.4328
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 1.5162 - accuracy: 0.4325
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 1.4919 - accuracy: 0.4205
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 1.4460 - accuracy: 0.4421
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 1.4438 - accuracy: 0.4363
Epoch 20/100
34/34 [==============================] - 0s 1ms/step - loss: 1.4304 - accuracy: 0.4347
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 1.4194 - accuracy: 0.4365
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3815 - accuracy: 0.4408
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 1.4082 - accuracy: 0.4304
Epoch 24/100
34/34 [==============================] - 0s 1ms/step - loss: 1.3695 - accuracy: 0.4350
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3495 - accuracy: 0.4670
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3488 - accuracy: 0.4216
Epoch 27/100
34/34 [==============================] - 0s 1ms/step - loss: 1.3556 - accuracy: 0.4311
Epoch 28/100
34/34 [==============================] - 0s 3ms/step - loss: 1.3421 - accuracy: 0.4428
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3248 - accuracy: 0.4463
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3421 - accuracy: 0.4237
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2921 - accuracy: 0.4374
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3263 - accuracy: 0.4080
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3016 - accuracy: 0.4246
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3055 - accuracy: 0.4463
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2879 - accuracy: 0.4538
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2922 - accuracy: 0.4380
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2850 - accuracy: 0.4296
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2601 - accuracy: 0.4506
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2450 - accuracy: 0.4507
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2466 - accuracy: 0.4227
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2266 - accuracy: 0.4180
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2283 - accuracy: 0.4545
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2772 - accuracy: 0.4352
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2010 - accuracy: 0.4536
Epoch 45/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2375 - accuracy: 0.4319
Epoch 46/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2322 - accuracy: 0.4267
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2478 - accuracy: 0.4363
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2532 - accuracy: 0.4265
Epoch 49/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2245 - accuracy: 0.4306
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2506 - accuracy: 0.4415
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2333 - accuracy: 0.4586
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2353 - accuracy: 0.4310
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2645 - accuracy: 0.4444
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2384 - accuracy: 0.4123
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2373 - accuracy: 0.4427
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2369 - accuracy: 0.4223: 0s - loss: 1.2372 - accuracy: 0.42
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2415 - accuracy: 0.4313
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2306 - accuracy: 0.4379
Epoch 59/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2323 - accuracy: 0.4136
Epoch 60/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2317 - accuracy: 0.4405
Epoch 61/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2441 - accuracy: 0.4054
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2264 - accuracy: 0.4062
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2078 - accuracy: 0.4349
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2125 - accuracy: 0.4386
Epoch 65/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2043 - accuracy: 0.4556
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2066 - accuracy: 0.4355
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2066 - accuracy: 0.4516
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2538 - accuracy: 0.4300
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1914 - accuracy: 0.4342
Epoch 70/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2183 - accuracy: 0.4058
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1864 - accuracy: 0.4253
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2218 - accuracy: 0.4452
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2401 - accuracy: 0.4242
Epoch 74/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2165 - accuracy: 0.4328
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2277 - accuracy: 0.4295
Epoch 76/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2102 - accuracy: 0.4704
Epoch 77/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1891 - accuracy: 0.4688
Epoch 78/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2015 - accuracy: 0.4618
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2074 - accuracy: 0.4122
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1986 - accuracy: 0.4376
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2708 - accuracy: 0.3979
Epoch 82/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2816 - accuracy: 0.4132
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2052 - accuracy: 0.4352
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2149 - accuracy: 0.4331
Epoch 85/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2033 - accuracy: 0.4420
Epoch 86/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1991 - accuracy: 0.4534
Epoch 87/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2025 - accuracy: 0.4360
Epoch 88/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2146 - accuracy: 0.4190
Epoch 89/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2293 - accuracy: 0.4352
Epoch 90/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1995 - accuracy: 0.4053
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2113 - accuracy: 0.4490
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2007 - accuracy: 0.4106
Epoch 93/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2542 - accuracy: 0.4118
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2335 - accuracy: 0.4081
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2452 - accuracy: 0.4108
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2393 - accuracy: 0.4219
Epoch 97/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1830 - accuracy: 0.4559
Epoch 98/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1772 - accuracy: 0.4354
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2258 - accuracy: 0.4147
Epoch 100/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1781 - accuracy: 0.4523
Out[1272]:
<tensorflow.python.keras.callbacks.History at 0x1a6228ec50>
In [1273]:
model12.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 1.2038 - accuracy: 0.4332
Out[1273]:
[1.203839659690857, 0.43323996663093567]

2. Adding Four Hidden Layers to Model

In [1274]:
# Initialize Sequential model
model13 = Sequential()

# Input Layer
model13.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Adding four Hidden layers
model13.add(Dense(10, activation='sigmoid', kernel_initializer = 'normal'))    # 2nd layer
model13.add(Dense(20, activation='sigmoid', kernel_initializer = 'normal'))    # 3rd layer

# Hidden layers
model13.add(Dense(30, activation='sigmoid', kernel_initializer = 'normal'))     # 4th layer
model13.add(Dense(15, activation='sigmoid', kernel_initializer = 'normal'))     # 5th layer


#Output layer
model13.add(Dense(10, activation='softmax', kernel_initializer = 'normal'))

sgd8 = optimizers.SGD(lr = 0.01)
model13.compile(optimizer = sgd8, loss = 'categorical_crossentropy', metrics = ['accuracy'])
In [1275]:
model13.summary()
Model: "sequential_75"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_187 (Dense)            (None, 9)                 135       
_________________________________________________________________
dense_188 (Dense)            (None, 10)                100       
_________________________________________________________________
dense_189 (Dense)            (None, 20)                220       
_________________________________________________________________
dense_190 (Dense)            (None, 30)                630       
_________________________________________________________________
dense_191 (Dense)            (None, 15)                465       
_________________________________________________________________
dense_192 (Dense)            (None, 10)                160       
=================================================================
Total params: 1,710
Trainable params: 1,710
Non-trainable params: 0
_________________________________________________________________
In [1276]:
model13.fit(X_train_sd, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 2.1635 - accuracy: 0.3829
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8466 - accuracy: 0.4210
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 1.6214 - accuracy: 0.4242
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 1.5258 - accuracy: 0.4206
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 1.4306 - accuracy: 0.4226
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3499 - accuracy: 0.4387
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3097 - accuracy: 0.4578
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3273 - accuracy: 0.4444
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2843 - accuracy: 0.4237
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2667 - accuracy: 0.4326
Epoch 11/100
34/34 [==============================] - ETA: 0s - loss: 1.2531 - accuracy: 0.45 - 0s 2ms/step - loss: 1.2538 - accuracy: 0.4512
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2117 - accuracy: 0.4427
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2626 - accuracy: 0.4161
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2544 - accuracy: 0.4270
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2246 - accuracy: 0.4275
Epoch 16/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2697 - accuracy: 0.4178
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2530 - accuracy: 0.4354
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2490 - accuracy: 0.4215
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1989 - accuracy: 0.4314
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2200 - accuracy: 0.4300
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1868 - accuracy: 0.4370
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2351 - accuracy: 0.4411
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2332 - accuracy: 0.4336
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2311 - accuracy: 0.4117
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1960 - accuracy: 0.4260
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2238 - accuracy: 0.4254
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2152 - accuracy: 0.4254
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1730 - accuracy: 0.4399
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1970 - accuracy: 0.4369
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1982 - accuracy: 0.4492
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1872 - accuracy: 0.4396
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2517 - accuracy: 0.4088
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2088 - accuracy: 0.4304
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2225 - accuracy: 0.4072
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1641 - accuracy: 0.4469
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2070 - accuracy: 0.4428
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2284 - accuracy: 0.4126
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2195 - accuracy: 0.4280
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1751 - accuracy: 0.4474
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2011 - accuracy: 0.4372
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1858 - accuracy: 0.4299
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2042 - accuracy: 0.4433
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1821 - accuracy: 0.4402
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1958 - accuracy: 0.4321
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2117 - accuracy: 0.4260
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2381 - accuracy: 0.4058
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2065 - accuracy: 0.4341
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1777 - accuracy: 0.4380
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1860 - accuracy: 0.4474
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2007 - accuracy: 0.4441
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1755 - accuracy: 0.4166
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2013 - accuracy: 0.4308
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1826 - accuracy: 0.4330
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1878 - accuracy: 0.4209
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2143 - accuracy: 0.4059
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1764 - accuracy: 0.4381
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1726 - accuracy: 0.4499
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2052 - accuracy: 0.4378
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1601 - accuracy: 0.4537
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1737 - accuracy: 0.4302
Epoch 61/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2062 - accuracy: 0.4382
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1875 - accuracy: 0.4396
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2067 - accuracy: 0.4326
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2058 - accuracy: 0.4480
Epoch 65/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1702 - accuracy: 0.4390
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1965 - accuracy: 0.4366
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2002 - accuracy: 0.4286
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1787 - accuracy: 0.4277
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1521 - accuracy: 0.4185
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2104 - accuracy: 0.4325
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1915 - accuracy: 0.4239
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1713 - accuracy: 0.4309
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1897 - accuracy: 0.4434
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1862 - accuracy: 0.4141
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1612 - accuracy: 0.4424
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1962 - accuracy: 0.4006
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1768 - accuracy: 0.4360
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1849 - accuracy: 0.4497
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1929 - accuracy: 0.4530
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1859 - accuracy: 0.4366
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1639 - accuracy: 0.4434
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1616 - accuracy: 0.4499
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2035 - accuracy: 0.4372
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1718 - accuracy: 0.4257
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1538 - accuracy: 0.4307
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1521 - accuracy: 0.4377
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1940 - accuracy: 0.4139
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2192 - accuracy: 0.4078
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1882 - accuracy: 0.4276
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1624 - accuracy: 0.4339
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1882 - accuracy: 0.4112
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1861 - accuracy: 0.4074
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1839 - accuracy: 0.4376
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2307 - accuracy: 0.4002
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1777 - accuracy: 0.4411
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1706 - accuracy: 0.4171
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1927 - accuracy: 0.4497
Epoch 98/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1768 - accuracy: 0.4351
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2024 - accuracy: 0.4155
Epoch 100/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1776 - accuracy: 0.4343
Out[1276]:
<tensorflow.python.keras.callbacks.History at 0x1a62451450>
In [1277]:
model13.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 2ms/step - loss: 1.1859 - accuracy: 0.4332
Out[1277]:
[1.1858723163604736, 0.43323996663093567]

B. --------------------Adam Optimizer---------------------

In [1303]:
# Adam Neural Network regression model before pca

class_model1 = Sequential()

# Input Layer

class_model1.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Output Layer

class_model1.add(Dense(10, kernel_initializer = 'normal', activation = 'softmax'))

adam6 = optimizers.Adam(lr = 0.01)
class_model1.compile(optimizer = adam6, loss = 'categorical_crossentropy', metrics = ['accuracy'])
In [1304]:
class_model1.summary()
Model: "sequential_83"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_223 (Dense)            (None, 9)                 135       
_________________________________________________________________
dense_224 (Dense)            (None, 10)                100       
=================================================================
Total params: 235
Trainable params: 235
Non-trainable params: 0
_________________________________________________________________
In [1305]:
his1 = class_model1.fit(X_train_sd, y_train, epochs = 100, verbose = 1)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 2.0188 - accuracy: 0.4339
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1055 - accuracy: 0.5767
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0037 - accuracy: 0.6006
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9813 - accuracy: 0.6011
Epoch 5/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9331 - accuracy: 0.6350
Epoch 6/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9069 - accuracy: 0.6264
Epoch 7/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9461 - accuracy: 0.5974
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9067 - accuracy: 0.6563
Epoch 9/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9360 - accuracy: 0.6277
Epoch 10/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9182 - accuracy: 0.6033
Epoch 11/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9311 - accuracy: 0.6080
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9315 - accuracy: 0.6178
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8804 - accuracy: 0.6519
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9022 - accuracy: 0.6155
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8530 - accuracy: 0.6774
Epoch 16/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9000 - accuracy: 0.6233
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9273 - accuracy: 0.5952
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9234 - accuracy: 0.6019
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9034 - accuracy: 0.6160
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9058 - accuracy: 0.6159
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9079 - accuracy: 0.6222
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8762 - accuracy: 0.6397
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8566 - accuracy: 0.6569
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8937 - accuracy: 0.6206
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9027 - accuracy: 0.6224
Epoch 26/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8972 - accuracy: 0.6206
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9385 - accuracy: 0.6097
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9123 - accuracy: 0.6417
Epoch 29/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8792 - accuracy: 0.6268
Epoch 30/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8525 - accuracy: 0.6465
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8769 - accuracy: 0.6288
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8872 - accuracy: 0.6268
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8586 - accuracy: 0.6374
Epoch 34/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8941 - accuracy: 0.6329
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8722 - accuracy: 0.6178
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8457 - accuracy: 0.6437
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8703 - accuracy: 0.6393
Epoch 38/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8524 - accuracy: 0.6642
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8079 - accuracy: 0.6839
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8646 - accuracy: 0.6441
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8566 - accuracy: 0.6486
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8734 - accuracy: 0.6307
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8207 - accuracy: 0.6606
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8510 - accuracy: 0.6437
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8324 - accuracy: 0.6496
Epoch 46/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8565 - accuracy: 0.6391
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8416 - accuracy: 0.6638
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8658 - accuracy: 0.6349
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8184 - accuracy: 0.6346
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8806 - accuracy: 0.6309
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8414 - accuracy: 0.6435
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8771 - accuracy: 0.6272
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8896 - accuracy: 0.6281
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8935 - accuracy: 0.6346
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8336 - accuracy: 0.6324
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8067 - accuracy: 0.6628
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8393 - accuracy: 0.6532
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8478 - accuracy: 0.6572
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8149 - accuracy: 0.6534
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8196 - accuracy: 0.6640
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8388 - accuracy: 0.6660
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8530 - accuracy: 0.6540
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8387 - accuracy: 0.6346
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9040 - accuracy: 0.6185
Epoch 65/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8418 - accuracy: 0.6352
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8565 - accuracy: 0.6426
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8839 - accuracy: 0.6282
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8150 - accuracy: 0.6583
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8138 - accuracy: 0.6632
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8466 - accuracy: 0.6319
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8342 - accuracy: 0.6415
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8358 - accuracy: 0.6481
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8356 - accuracy: 0.6360
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8107 - accuracy: 0.6690
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8147 - accuracy: 0.6418
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8217 - accuracy: 0.6548
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8402 - accuracy: 0.6435
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8221 - accuracy: 0.6526
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8296 - accuracy: 0.6503
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8035 - accuracy: 0.6733
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8100 - accuracy: 0.6684
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8507 - accuracy: 0.6310
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7949 - accuracy: 0.6564
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8083 - accuracy: 0.6473
Epoch 85/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8125 - accuracy: 0.6518
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8128 - accuracy: 0.6554
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8121 - accuracy: 0.6462
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7982 - accuracy: 0.6547
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8091 - accuracy: 0.6806
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8123 - accuracy: 0.6348
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8302 - accuracy: 0.6310
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8165 - accuracy: 0.6503
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8185 - accuracy: 0.6584
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8259 - accuracy: 0.6175
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8258 - accuracy: 0.6498
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8178 - accuracy: 0.6556
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8386 - accuracy: 0.6503
Epoch 98/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8342 - accuracy: 0.6344
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7727 - accuracy: 0.7033
Epoch 100/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8263 - accuracy: 0.6493
In [1306]:
class_model1.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 0.8157 - accuracy: 0.6583
Out[1306]:
[0.8157033920288086, 0.6582633256912231]

1. Adding Two Hidden Layers to Model

In [1307]:
# Initialize Sequential model
model14 = Sequential()

# Input Layer
model14.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Adding two Hidden layers
model14.add(Dense(20, activation ='elu', kernel_initializer = 'normal'))    # 2nd layer
model14.add(Dense(40, activation ='elu', kernel_initializer = 'normal'))    # 3rd layer

#Output layer
model14.add(Dense(10, activation='softmax', kernel_initializer = 'normal'))

adam7 = optimizers.Adam(lr = 0.01)
model14.compile(optimizer = adam7, loss = 'categorical_crossentropy', metrics = ['accuracy'])
In [1308]:
model14.summary()
Model: "sequential_84"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_225 (Dense)            (None, 9)                 135       
_________________________________________________________________
dense_226 (Dense)            (None, 20)                200       
_________________________________________________________________
dense_227 (Dense)            (None, 40)                840       
_________________________________________________________________
dense_228 (Dense)            (None, 10)                410       
=================================================================
Total params: 1,585
Trainable params: 1,585
Non-trainable params: 0
_________________________________________________________________
In [1309]:
model14.fit(X_train_sd, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 3ms/step - loss: 1.7487 - accuracy: 0.3939
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1351 - accuracy: 0.4654
Epoch 3/100
34/34 [==============================] - 0s 3ms/step - loss: 1.0565 - accuracy: 0.5539
Epoch 4/100
34/34 [==============================] - 0s 3ms/step - loss: 1.0517 - accuracy: 0.5757
Epoch 5/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9659 - accuracy: 0.5971
Epoch 6/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9261 - accuracy: 0.6209
Epoch 7/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9262 - accuracy: 0.6091
Epoch 8/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9529 - accuracy: 0.6083
Epoch 9/100
34/34 [==============================] - 0s 4ms/step - loss: 0.9262 - accuracy: 0.6245
Epoch 10/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9321 - accuracy: 0.6240
Epoch 11/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9712 - accuracy: 0.6059
Epoch 12/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9211 - accuracy: 0.6327
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9231 - accuracy: 0.6314
Epoch 14/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8846 - accuracy: 0.6516
Epoch 15/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9156 - accuracy: 0.6266
Epoch 16/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8445 - accuracy: 0.6596
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9159 - accuracy: 0.6328
Epoch 18/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8827 - accuracy: 0.6412
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8720 - accuracy: 0.6430
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8545 - accuracy: 0.6626
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8789 - accuracy: 0.6442
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9156 - accuracy: 0.6258
Epoch 23/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8651 - accuracy: 0.6491
Epoch 24/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8882 - accuracy: 0.6203
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8730 - accuracy: 0.6414
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8613 - accuracy: 0.6285
Epoch 27/100
34/34 [==============================] - 0s 5ms/step - loss: 0.8365 - accuracy: 0.6559
Epoch 28/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8582 - accuracy: 0.6511
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8901 - accuracy: 0.6481
Epoch 30/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8427 - accuracy: 0.6575
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8523 - accuracy: 0.6668
Epoch 32/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8480 - accuracy: 0.6657
Epoch 33/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8612 - accuracy: 0.6658
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8024 - accuracy: 0.6812
Epoch 35/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8453 - accuracy: 0.6583
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8259 - accuracy: 0.6697
Epoch 37/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8270 - accuracy: 0.6652
Epoch 38/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8629 - accuracy: 0.6330
Epoch 39/100
34/34 [==============================] - 0s 5ms/step - loss: 0.8904 - accuracy: 0.6510
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7939 - accuracy: 0.6848
Epoch 41/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8029 - accuracy: 0.6790
Epoch 42/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8288 - accuracy: 0.6459
Epoch 43/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8070 - accuracy: 0.6567
Epoch 44/100
34/34 [==============================] - 0s 3ms/step - loss: 0.7683 - accuracy: 0.6935
Epoch 45/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8303 - accuracy: 0.6695
Epoch 46/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8329 - accuracy: 0.6591
Epoch 47/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8330 - accuracy: 0.6507
Epoch 48/100
34/34 [==============================] - 0s 5ms/step - loss: 0.7760 - accuracy: 0.6929
Epoch 49/100
34/34 [==============================] - 0s 3ms/step - loss: 0.7882 - accuracy: 0.6744
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7927 - accuracy: 0.6671
Epoch 51/100
34/34 [==============================] - 0s 3ms/step - loss: 0.7909 - accuracy: 0.6689
Epoch 52/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8133 - accuracy: 0.6785
Epoch 53/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8431 - accuracy: 0.6579
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8012 - accuracy: 0.6681
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7544 - accuracy: 0.6834
Epoch 56/100
34/34 [==============================] - 0s 3ms/step - loss: 0.7784 - accuracy: 0.6688
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8304 - accuracy: 0.6443
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7855 - accuracy: 0.6874
Epoch 59/100
34/34 [==============================] - 0s 3ms/step - loss: 0.7539 - accuracy: 0.6952
Epoch 60/100
34/34 [==============================] - 0s 3ms/step - loss: 0.7824 - accuracy: 0.6796
Epoch 61/100
34/34 [==============================] - 0s 3ms/step - loss: 0.7316 - accuracy: 0.7067
Epoch 62/100
34/34 [==============================] - 0s 3ms/step - loss: 0.7823 - accuracy: 0.6826
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7660 - accuracy: 0.6799: 0s - loss: 0.7607 - accuracy: 0.68
Epoch 64/100
34/34 [==============================] - 0s 4ms/step - loss: 0.7600 - accuracy: 0.6713
Epoch 65/100
34/34 [==============================] - 0s 3ms/step - loss: 0.7725 - accuracy: 0.6648
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7246 - accuracy: 0.6898
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7626 - accuracy: 0.7107
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7798 - accuracy: 0.6660
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7388 - accuracy: 0.6893
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7571 - accuracy: 0.6679
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6982 - accuracy: 0.7042
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7236 - accuracy: 0.6818
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7547 - accuracy: 0.6761
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7482 - accuracy: 0.6810
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6847 - accuracy: 0.7213
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6955 - accuracy: 0.7110
Epoch 77/100
34/34 [==============================] - 0s 8ms/step - loss: 0.7161 - accuracy: 0.7117
Epoch 78/100
34/34 [==============================] - 0s 3ms/step - loss: 0.7402 - accuracy: 0.6898
Epoch 79/100
34/34 [==============================] - 0s 3ms/step - loss: 0.7246 - accuracy: 0.7018
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6920 - accuracy: 0.7084
Epoch 81/100
34/34 [==============================] - 0s 3ms/step - loss: 0.7035 - accuracy: 0.7264
Epoch 82/100
34/34 [==============================] - 0s 3ms/step - loss: 0.7177 - accuracy: 0.6954
Epoch 83/100
34/34 [==============================] - 0s 3ms/step - loss: 0.6788 - accuracy: 0.7047
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6717 - accuracy: 0.7138
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6536 - accuracy: 0.7295
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7097 - accuracy: 0.6934
Epoch 87/100
34/34 [==============================] - 0s 3ms/step - loss: 0.7106 - accuracy: 0.6980
Epoch 88/100
34/34 [==============================] - 0s 3ms/step - loss: 0.6431 - accuracy: 0.7218
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6488 - accuracy: 0.7320
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6327 - accuracy: 0.7494
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6661 - accuracy: 0.7258
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7037 - accuracy: 0.6894
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6064 - accuracy: 0.7495
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6993 - accuracy: 0.6989
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6992 - accuracy: 0.7050
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6795 - accuracy: 0.7130
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6581 - accuracy: 0.7353
Epoch 98/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7070 - accuracy: 0.7008
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6444 - accuracy: 0.7240
Epoch 100/100
34/34 [==============================] - 0s 2ms/step - loss: 0.5862 - accuracy: 0.7744
Out[1309]:
<tensorflow.python.keras.callbacks.History at 0x1a68d9a350>
In [1310]:
model14.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 0.5889 - accuracy: 0.7479
Out[1310]:
[0.5888606309890747, 0.7478991746902466]

2. Adding Four Hidden Layers to Model

In [1311]:
# Initialize Sequential model
model15 = Sequential()

# Input Layer
model15.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Adding four Hidden layers
model15.add(Dense(25, activation='tanh', kernel_initializer = 'normal'))    # 2nd layer
model15.add(Dense(10, activation='tanh', kernel_initializer = 'normal'))     # 3rd layer

model15.add(Dense(15, activation='tanh', kernel_initializer = 'normal'))     # 4th layer
model15.add(Dense(25, activation='tanh', kernel_initializer = 'normal'))     # 5th layer


#Output layer
model15.add(Dense(10, activation='softmax', kernel_initializer = 'normal'))

adam8 = optimizers.Adam(lr = 0.01)
model15.compile(optimizer = adam8, loss = 'categorical_crossentropy', metrics = ['accuracy'])
In [1312]:
model15.summary()
Model: "sequential_85"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_229 (Dense)            (None, 9)                 135       
_________________________________________________________________
dense_230 (Dense)            (None, 25)                250       
_________________________________________________________________
dense_231 (Dense)            (None, 10)                260       
_________________________________________________________________
dense_232 (Dense)            (None, 15)                165       
_________________________________________________________________
dense_233 (Dense)            (None, 25)                400       
_________________________________________________________________
dense_234 (Dense)            (None, 10)                260       
=================================================================
Total params: 1,470
Trainable params: 1,470
Non-trainable params: 0
_________________________________________________________________
In [1313]:
model15.fit(X_train_sd, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 3ms/step - loss: 1.7970 - accuracy: 0.3689
Epoch 2/100
34/34 [==============================] - 0s 3ms/step - loss: 1.1938 - accuracy: 0.4429
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2143 - accuracy: 0.4128
Epoch 4/100
34/34 [==============================] - 0s 3ms/step - loss: 1.2111 - accuracy: 0.4284
Epoch 5/100
34/34 [==============================] - 0s 3ms/step - loss: 1.1696 - accuracy: 0.4363
Epoch 6/100
34/34 [==============================] - 0s 3ms/step - loss: 1.1424 - accuracy: 0.4647
Epoch 7/100
34/34 [==============================] - 0s 3ms/step - loss: 1.0605 - accuracy: 0.5815
Epoch 8/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9986 - accuracy: 0.5923
Epoch 9/100
34/34 [==============================] - 0s 3ms/step - loss: 1.0236 - accuracy: 0.5854
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9633 - accuracy: 0.6332
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9755 - accuracy: 0.5936
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9504 - accuracy: 0.6235
Epoch 13/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9351 - accuracy: 0.6349
Epoch 14/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9017 - accuracy: 0.6410
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8709 - accuracy: 0.6581
Epoch 16/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9633 - accuracy: 0.6160
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8884 - accuracy: 0.6544
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9039 - accuracy: 0.6191
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8955 - accuracy: 0.6372
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8791 - accuracy: 0.6675
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9366 - accuracy: 0.6220
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9400 - accuracy: 0.6228
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9024 - accuracy: 0.6501
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8883 - accuracy: 0.6655
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8961 - accuracy: 0.6493
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8746 - accuracy: 0.6332
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8699 - accuracy: 0.6554
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8970 - accuracy: 0.6498
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8991 - accuracy: 0.6224
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8700 - accuracy: 0.6407
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8906 - accuracy: 0.6639
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8500 - accuracy: 0.6658
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8696 - accuracy: 0.6747
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9117 - accuracy: 0.6506
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8954 - accuracy: 0.6298
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8874 - accuracy: 0.6529
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8648 - accuracy: 0.6517
Epoch 38/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8912 - accuracy: 0.6735
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8425 - accuracy: 0.6541
Epoch 40/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9189 - accuracy: 0.6480
Epoch 41/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8729 - accuracy: 0.6450
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8663 - accuracy: 0.6556
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8859 - accuracy: 0.6164
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8411 - accuracy: 0.6539
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8643 - accuracy: 0.6485
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8914 - accuracy: 0.6800
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8789 - accuracy: 0.6608
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8657 - accuracy: 0.6661
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9007 - accuracy: 0.6474
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9023 - accuracy: 0.6572
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8566 - accuracy: 0.6556
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8785 - accuracy: 0.6702
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8709 - accuracy: 0.6475
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8233 - accuracy: 0.6673
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9001 - accuracy: 0.6305
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8768 - accuracy: 0.6495
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8530 - accuracy: 0.6473
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8607 - accuracy: 0.6304
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8607 - accuracy: 0.6722
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8531 - accuracy: 0.6745
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8679 - accuracy: 0.6490
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8893 - accuracy: 0.6056
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8600 - accuracy: 0.6213
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8714 - accuracy: 0.6365
Epoch 65/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8026 - accuracy: 0.6750
Epoch 66/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8542 - accuracy: 0.6375
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8271 - accuracy: 0.6766
Epoch 68/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8543 - accuracy: 0.6434
Epoch 69/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8699 - accuracy: 0.6493
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9039 - accuracy: 0.6191
Epoch 71/100
34/34 [==============================] - 0s 4ms/step - loss: 0.9143 - accuracy: 0.6281
Epoch 72/100
34/34 [==============================] - 0s 4ms/step - loss: 0.8473 - accuracy: 0.6480
Epoch 73/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8724 - accuracy: 0.6193
Epoch 74/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8781 - accuracy: 0.6429
Epoch 75/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8947 - accuracy: 0.6608
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8481 - accuracy: 0.6489
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8518 - accuracy: 0.6637
Epoch 78/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8970 - accuracy: 0.6402
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8743 - accuracy: 0.6474
Epoch 80/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8530 - accuracy: 0.6525
Epoch 81/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8825 - accuracy: 0.6171
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8713 - accuracy: 0.6568
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8786 - accuracy: 0.6192
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8239 - accuracy: 0.6583
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8630 - accuracy: 0.6403
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8582 - accuracy: 0.6293
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8604 - accuracy: 0.5811
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8843 - accuracy: 0.6129
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8817 - accuracy: 0.6234
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9506 - accuracy: 0.5913
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8661 - accuracy: 0.6455
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8756 - accuracy: 0.6331
Epoch 93/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8338 - accuracy: 0.6667
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8425 - accuracy: 0.6733
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8178 - accuracy: 0.6710
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8910 - accuracy: 0.6290
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8723 - accuracy: 0.6293
Epoch 98/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8371 - accuracy: 0.6414
Epoch 99/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8579 - accuracy: 0.6401
Epoch 100/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8820 - accuracy: 0.6443
Out[1313]:
<tensorflow.python.keras.callbacks.History at 0x1a694b0e50>
In [1314]:
model15.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 2ms/step - loss: 0.8692 - accuracy: 0.6218
Out[1314]:
[0.8692255616188049, 0.6218487620353699]

C. --------------------RMSprop Optimizer---------------------

In [1318]:
# Rmsprop Neural Network regression model before pca

class_model2 = Sequential()

# Input Layer

class_model2.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Output Layer

class_model2.add(Dense(10, kernel_initializer = 'normal', activation = 'softmax'))

rms6 = optimizers.RMSprop(lr = 0.01)
class_model2.compile(optimizer = rms6, loss = 'categorical_crossentropy', metrics = ['accuracy'])
In [1319]:
class_model2.summary()
Model: "sequential_86"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_235 (Dense)            (None, 9)                 135       
_________________________________________________________________
dense_236 (Dense)            (None, 10)                100       
=================================================================
Total params: 235
Trainable params: 235
Non-trainable params: 0
_________________________________________________________________
In [1320]:
his2 = class_model2.fit(X_train_sd, y_train, epochs = 100, verbose = 1)
Epoch 1/100
34/34 [==============================] - 1s 1ms/step - loss: 1.7538 - accuracy: 0.4386 
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0432 - accuracy: 0.6011
Epoch 3/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0143 - accuracy: 0.5920
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8940 - accuracy: 0.6286
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9547 - accuracy: 0.5960
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9302 - accuracy: 0.5993
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9348 - accuracy: 0.6154
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9088 - accuracy: 0.6064
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9616 - accuracy: 0.5995
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9164 - accuracy: 0.6104
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9143 - accuracy: 0.6164
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9273 - accuracy: 0.6317
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9141 - accuracy: 0.6236
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9149 - accuracy: 0.6124
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9063 - accuracy: 0.6336
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9473 - accuracy: 0.6083
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9072 - accuracy: 0.6245
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8952 - accuracy: 0.6359
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8744 - accuracy: 0.6428
Epoch 20/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8780 - accuracy: 0.6602
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8917 - accuracy: 0.6327
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8646 - accuracy: 0.6389
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9034 - accuracy: 0.6347
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8507 - accuracy: 0.6430
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8341 - accuracy: 0.6528
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8398 - accuracy: 0.6474
Epoch 27/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8366 - accuracy: 0.6627
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8888 - accuracy: 0.6119
Epoch 29/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8598 - accuracy: 0.6446
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8624 - accuracy: 0.6324
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8719 - accuracy: 0.6421
Epoch 32/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8693 - accuracy: 0.6407
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8870 - accuracy: 0.6434
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8997 - accuracy: 0.6093
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8748 - accuracy: 0.6380
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8501 - accuracy: 0.6507
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8630 - accuracy: 0.6337
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8817 - accuracy: 0.6385
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8660 - accuracy: 0.6231
Epoch 40/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9031 - accuracy: 0.6167
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8465 - accuracy: 0.6242
Epoch 42/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8570 - accuracy: 0.6465
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8578 - accuracy: 0.6458
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8883 - accuracy: 0.6117
Epoch 45/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8273 - accuracy: 0.6568
Epoch 46/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8645 - accuracy: 0.6242
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8902 - accuracy: 0.6327
Epoch 48/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8471 - accuracy: 0.6396
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8754 - accuracy: 0.6325
Epoch 50/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8520 - accuracy: 0.6175
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8905 - accuracy: 0.6240
Epoch 52/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8242 - accuracy: 0.6586
Epoch 53/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8129 - accuracy: 0.6746
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8531 - accuracy: 0.6536
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8538 - accuracy: 0.6509
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8470 - accuracy: 0.6441
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8509 - accuracy: 0.6334
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8370 - accuracy: 0.6511
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8223 - accuracy: 0.6693
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8354 - accuracy: 0.6508
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8185 - accuracy: 0.6703
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8047 - accuracy: 0.6747
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8479 - accuracy: 0.6523
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8798 - accuracy: 0.6305
Epoch 65/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8647 - accuracy: 0.6412
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8456 - accuracy: 0.6576
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8357 - accuracy: 0.6540
Epoch 68/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8151 - accuracy: 0.6580
Epoch 69/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8901 - accuracy: 0.6256
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8315 - accuracy: 0.6410
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8545 - accuracy: 0.6379
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8635 - accuracy: 0.6216
Epoch 73/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8338 - accuracy: 0.6432
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8586 - accuracy: 0.6306
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8528 - accuracy: 0.6245
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8072 - accuracy: 0.6539
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8225 - accuracy: 0.6713
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8572 - accuracy: 0.6404
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8432 - accuracy: 0.6612
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8325 - accuracy: 0.6567
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8512 - accuracy: 0.6473
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8444 - accuracy: 0.6462
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8145 - accuracy: 0.6776
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8296 - accuracy: 0.6694
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8205 - accuracy: 0.6561
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8349 - accuracy: 0.6419
Epoch 87/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8406 - accuracy: 0.6661
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7998 - accuracy: 0.6721
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8057 - accuracy: 0.6627
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8327 - accuracy: 0.6476
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8499 - accuracy: 0.6425
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8247 - accuracy: 0.6640
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8509 - accuracy: 0.6360
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8472 - accuracy: 0.6395
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8092 - accuracy: 0.6502
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8455 - accuracy: 0.6406
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8490 - accuracy: 0.6236
Epoch 98/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8431 - accuracy: 0.6554
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8212 - accuracy: 0.6662
Epoch 100/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8014 - accuracy: 0.6743
In [1321]:
class_model2.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 0.8127 - accuracy: 0.6676
Out[1321]:
[0.8126540780067444, 0.6676003932952881]

1. Adding Two Hidden Layers to Model

In [1322]:
# Initialize Sequential model
model16 = Sequential()

# Input Layer
model16.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Adding two Hidden layers
model16.add(Dense(14, activation='sigmoid', kernel_initializer = 'normal'))    # 2nd layer
model16.add(Dense(28, activation='sigmoid', kernel_initializer = 'normal'))    # 3rd layer

#Output layer
model16.add(Dense(10, activation='softmax', kernel_initializer = 'normal'))

rms7 = optimizers.RMSprop(lr = 0.01)
model16.compile(optimizer = rms7, loss = 'categorical_crossentropy', metrics = ['accuracy'])
In [1323]:
model16.summary()
Model: "sequential_87"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_237 (Dense)            (None, 9)                 135       
_________________________________________________________________
dense_238 (Dense)            (None, 14)                140       
_________________________________________________________________
dense_239 (Dense)            (None, 28)                420       
_________________________________________________________________
dense_240 (Dense)            (None, 10)                290       
=================================================================
Total params: 985
Trainable params: 985
Non-trainable params: 0
_________________________________________________________________
In [1324]:
model16.fit(X_train_sd, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 1.4883 - accuracy: 0.3593
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1789 - accuracy: 0.4484
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1134 - accuracy: 0.5215
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0178 - accuracy: 0.5867
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9889 - accuracy: 0.5866
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9754 - accuracy: 0.6147
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9279 - accuracy: 0.6183
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9294 - accuracy: 0.6310
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0017 - accuracy: 0.6003
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9865 - accuracy: 0.6186
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9398 - accuracy: 0.6128
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9191 - accuracy: 0.6104
Epoch 13/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9529 - accuracy: 0.6074
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9875 - accuracy: 0.6122
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9668 - accuracy: 0.6185
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9699 - accuracy: 0.6201
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8940 - accuracy: 0.6219
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9352 - accuracy: 0.6239
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9084 - accuracy: 0.6479
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9128 - accuracy: 0.6249
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9600 - accuracy: 0.6172
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9296 - accuracy: 0.6232
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9243 - accuracy: 0.6297
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9244 - accuracy: 0.6354
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9434 - accuracy: 0.6003
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9623 - accuracy: 0.6259
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9046 - accuracy: 0.6353
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8874 - accuracy: 0.6457
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8990 - accuracy: 0.6293
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9020 - accuracy: 0.6239
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8904 - accuracy: 0.6528
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8942 - accuracy: 0.6573
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9070 - accuracy: 0.6272
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8936 - accuracy: 0.6470
Epoch 35/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8780 - accuracy: 0.6569
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8833 - accuracy: 0.6298
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9001 - accuracy: 0.6308
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8953 - accuracy: 0.6252
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9127 - accuracy: 0.6193
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8921 - accuracy: 0.6299
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9376 - accuracy: 0.6169
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8729 - accuracy: 0.6354
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9114 - accuracy: 0.6163
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8604 - accuracy: 0.6440
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8842 - accuracy: 0.6603
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9212 - accuracy: 0.6366
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8952 - accuracy: 0.6294
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8549 - accuracy: 0.6537
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9130 - accuracy: 0.6204
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9267 - accuracy: 0.6312
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8708 - accuracy: 0.6383
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8983 - accuracy: 0.6394
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8397 - accuracy: 0.6519
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8460 - accuracy: 0.6533
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8884 - accuracy: 0.6283
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8242 - accuracy: 0.6877
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8467 - accuracy: 0.6763
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8551 - accuracy: 0.6658
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8535 - accuracy: 0.6563
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8559 - accuracy: 0.6566
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8665 - accuracy: 0.6459
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8727 - accuracy: 0.6398
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8862 - accuracy: 0.6383
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9018 - accuracy: 0.6417
Epoch 65/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8617 - accuracy: 0.6332
Epoch 66/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8276 - accuracy: 0.6596
Epoch 67/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8392 - accuracy: 0.6506
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8664 - accuracy: 0.6757
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7976 - accuracy: 0.6963
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8519 - accuracy: 0.6883
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8237 - accuracy: 0.6726
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8501 - accuracy: 0.6720
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7977 - accuracy: 0.6869
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8194 - accuracy: 0.6929
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8475 - accuracy: 0.6645
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8571 - accuracy: 0.6726
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8790 - accuracy: 0.6499
Epoch 78/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8217 - accuracy: 0.6946
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8241 - accuracy: 0.6657
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8567 - accuracy: 0.6658
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8188 - accuracy: 0.6824
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8519 - accuracy: 0.6671
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8221 - accuracy: 0.6799
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8341 - accuracy: 0.6641
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8150 - accuracy: 0.6806
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8352 - accuracy: 0.6830
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8460 - accuracy: 0.6682
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8382 - accuracy: 0.6769
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8069 - accuracy: 0.6863
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8309 - accuracy: 0.6720
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8093 - accuracy: 0.7038
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8531 - accuracy: 0.6780
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8266 - accuracy: 0.6769
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8077 - accuracy: 0.6784
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7829 - accuracy: 0.6891
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8135 - accuracy: 0.6845
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7785 - accuracy: 0.6940
Epoch 98/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8033 - accuracy: 0.6818
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7709 - accuracy: 0.6978
Epoch 100/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8391 - accuracy: 0.6684
Out[1324]:
<tensorflow.python.keras.callbacks.History at 0x1a69558d10>
In [1325]:
model16.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 2ms/step - loss: 0.7937 - accuracy: 0.6863
Out[1325]:
[0.7936777472496033, 0.686274528503418]

2. Adding Four Hidden Layers to Model

In [1327]:
# Initialize Sequential model
model17 = Sequential()

# Input Layer
model17.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Adding four Hidden layers
model17.add(Dense(50, activation='sigmoid', kernel_initializer = 'normal'))    # 2nd layer
model17.add(Dense(15, activation='sigmoid', kernel_initializer = 'normal'))    # 3rd layer

model17.add(Dense(30, activation='sigmoid', kernel_initializer = 'normal'))     # 4th layer
model17.add(Dense(60, activation='sigmoid', kernel_initializer = 'normal'))     # 5th layer


#Output layer
model17.add(Dense(10, activation='softmax', kernel_initializer = 'normal'))

rms8 = optimizers.RMSprop(lr = 0.01)
model17.compile(optimizer = rms8, loss = 'categorical_crossentropy', metrics = ['accuracy'])
In [1328]:
model17.summary()
Model: "sequential_89"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_247 (Dense)            (None, 9)                 135       
_________________________________________________________________
dense_248 (Dense)            (None, 50)                500       
_________________________________________________________________
dense_249 (Dense)            (None, 15)                765       
_________________________________________________________________
dense_250 (Dense)            (None, 30)                480       
_________________________________________________________________
dense_251 (Dense)            (None, 60)                1860      
_________________________________________________________________
dense_252 (Dense)            (None, 10)                610       
=================================================================
Total params: 4,350
Trainable params: 4,350
Non-trainable params: 0
_________________________________________________________________
In [1329]:
model17.fit(X_train_sd, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 1.3668 - accuracy: 0.3820
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1729 - accuracy: 0.4271
Epoch 3/100
34/34 [==============================] - 0s 3ms/step - loss: 1.1180 - accuracy: 0.4935
Epoch 4/100
34/34 [==============================] - 0s 3ms/step - loss: 1.0321 - accuracy: 0.5761
Epoch 5/100
34/34 [==============================] - 0s 3ms/step - loss: 1.0108 - accuracy: 0.5873
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0032 - accuracy: 0.5945
Epoch 7/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9927 - accuracy: 0.6085
Epoch 8/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9782 - accuracy: 0.6222
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9665 - accuracy: 0.6015
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9626 - accuracy: 0.6213
Epoch 11/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9365 - accuracy: 0.6168
Epoch 12/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9882 - accuracy: 0.5901
Epoch 13/100
34/34 [==============================] - 0s 3ms/step - loss: 1.0019 - accuracy: 0.5931
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9969 - accuracy: 0.5874
Epoch 15/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9879 - accuracy: 0.5908
Epoch 16/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9299 - accuracy: 0.6101
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8944 - accuracy: 0.6395
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9271 - accuracy: 0.6276
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9468 - accuracy: 0.6270
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9394 - accuracy: 0.6256
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9476 - accuracy: 0.6158
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9071 - accuracy: 0.6410
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9460 - accuracy: 0.6157
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9000 - accuracy: 0.6417
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9159 - accuracy: 0.6286
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9359 - accuracy: 0.6344
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8766 - accuracy: 0.6390
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9246 - accuracy: 0.6104
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8873 - accuracy: 0.6453
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9009 - accuracy: 0.6363
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8949 - accuracy: 0.6545
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8917 - accuracy: 0.6472
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9164 - accuracy: 0.6524
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8616 - accuracy: 0.6663
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8597 - accuracy: 0.6546
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8931 - accuracy: 0.6361
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8941 - accuracy: 0.6578
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8808 - accuracy: 0.6645
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8768 - accuracy: 0.6533
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9220 - accuracy: 0.6532
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8723 - accuracy: 0.6638
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8499 - accuracy: 0.6734
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8661 - accuracy: 0.6703
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8851 - accuracy: 0.6437
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8315 - accuracy: 0.6735
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8545 - accuracy: 0.6614
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8622 - accuracy: 0.6690
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8741 - accuracy: 0.6658
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8173 - accuracy: 0.6767
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8070 - accuracy: 0.6936
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8177 - accuracy: 0.6835
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8711 - accuracy: 0.6667
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8508 - accuracy: 0.6693
Epoch 54/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8606 - accuracy: 0.6585
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8478 - accuracy: 0.6570
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8502 - accuracy: 0.6630
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8739 - accuracy: 0.6629
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8192 - accuracy: 0.6946
Epoch 59/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8197 - accuracy: 0.6893
Epoch 60/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8244 - accuracy: 0.6762
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8085 - accuracy: 0.7009
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8479 - accuracy: 0.6664
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8803 - accuracy: 0.6458
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8457 - accuracy: 0.6793
Epoch 65/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8104 - accuracy: 0.7054
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8334 - accuracy: 0.6886
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8438 - accuracy: 0.6561
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8104 - accuracy: 0.6964
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8107 - accuracy: 0.6945
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8453 - accuracy: 0.6782
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8196 - accuracy: 0.6960
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7846 - accuracy: 0.6931
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8146 - accuracy: 0.6779
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7799 - accuracy: 0.6955
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8489 - accuracy: 0.6720
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7983 - accuracy: 0.7080
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7888 - accuracy: 0.6968
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8138 - accuracy: 0.6880
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8472 - accuracy: 0.6654
Epoch 80/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8055 - accuracy: 0.6918
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7877 - accuracy: 0.7121
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7915 - accuracy: 0.7057
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7986 - accuracy: 0.7014
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7722 - accuracy: 0.7156
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8568 - accuracy: 0.6712
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8121 - accuracy: 0.7039
Epoch 87/100
34/34 [==============================] - 0s 4ms/step - loss: 0.7961 - accuracy: 0.6937
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7976 - accuracy: 0.6914
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7561 - accuracy: 0.7217
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8060 - accuracy: 0.6920
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7990 - accuracy: 0.6951
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7602 - accuracy: 0.7357
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7842 - accuracy: 0.7018
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7972 - accuracy: 0.7009
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7985 - accuracy: 0.7001
Epoch 96/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8108 - accuracy: 0.6977
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7971 - accuracy: 0.6983
Epoch 98/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8147 - accuracy: 0.6985
Epoch 99/100
34/34 [==============================] - 0s 3ms/step - loss: 0.7366 - accuracy: 0.7179
Epoch 100/100
34/34 [==============================] - 0s 3ms/step - loss: 0.7441 - accuracy: 0.7256
Out[1329]:
<tensorflow.python.keras.callbacks.History at 0x1a6a292d10>
In [1330]:
model17.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 0.7640 - accuracy: 0.7171
Out[1330]:
[0.7639620304107666, 0.7170868515968323]

II ] Neural Network Models After PCA

A. -------------------SGD Optimizer--------------------

In [1345]:
# SGD Neural Network regression model after pca

class_model3 = Sequential()

# Input Layer

class_model3.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Output Layer

class_model3.add(Dense(10, kernel_initializer = 'normal', activation = 'softmax'))

sgd9 = optimizers.SGD(lr = 0.01)
class_model3.compile(optimizer = sgd9, loss = 'categorical_crossentropy', metrics = ['accuracy'])
In [1346]:
class_model3.summary()
Model: "sequential_95"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_269 (Dense)            (None, 9)                 135       
_________________________________________________________________
dense_270 (Dense)            (None, 10)                100       
=================================================================
Total params: 235
Trainable params: 235
Non-trainable params: 0
_________________________________________________________________
In [1347]:
his3 = class_model3.fit(X_train_sd, y_train, epochs = 100, verbose = 1)
Epoch 1/100
34/34 [==============================] - 1s 1ms/step - loss: 2.2810 - accuracy: 0.3078
Epoch 2/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1918 - accuracy: 0.4635
Epoch 3/100
34/34 [==============================] - 0s 1ms/step - loss: 2.1114 - accuracy: 0.4761
Epoch 4/100
34/34 [==============================] - 0s 1ms/step - loss: 2.0291 - accuracy: 0.5028
Epoch 5/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9699 - accuracy: 0.4534
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8927 - accuracy: 0.4671
Epoch 7/100
34/34 [==============================] - 0s 1ms/step - loss: 1.8302 - accuracy: 0.4735
Epoch 8/100
34/34 [==============================] - 0s 1ms/step - loss: 1.7735 - accuracy: 0.4537
Epoch 9/100
34/34 [==============================] - 0s 1ms/step - loss: 1.7141 - accuracy: 0.4493
Epoch 10/100
34/34 [==============================] - 0s 1ms/step - loss: 1.6748 - accuracy: 0.4616
Epoch 11/100
34/34 [==============================] - 0s 1ms/step - loss: 1.6203 - accuracy: 0.4716
Epoch 12/100
34/34 [==============================] - 0s 1ms/step - loss: 1.5830 - accuracy: 0.4656
Epoch 13/100
34/34 [==============================] - 0s 1ms/step - loss: 1.5280 - accuracy: 0.4699
Epoch 14/100
34/34 [==============================] - 0s 1ms/step - loss: 1.4845 - accuracy: 0.5032
Epoch 15/100
34/34 [==============================] - 0s 1ms/step - loss: 1.4679 - accuracy: 0.4997
Epoch 16/100
34/34 [==============================] - 0s 1ms/step - loss: 1.4282 - accuracy: 0.5076
Epoch 17/100
34/34 [==============================] - 0s 1ms/step - loss: 1.3646 - accuracy: 0.5286
Epoch 18/100
34/34 [==============================] - 0s 1ms/step - loss: 1.3706 - accuracy: 0.5279
Epoch 19/100
34/34 [==============================] - 0s 1ms/step - loss: 1.3474 - accuracy: 0.5331
Epoch 20/100
34/34 [==============================] - 0s 1ms/step - loss: 1.3318 - accuracy: 0.5325
Epoch 21/100
34/34 [==============================] - 0s 1ms/step - loss: 1.3090 - accuracy: 0.5272
Epoch 22/100
34/34 [==============================] - 0s 1ms/step - loss: 1.3168 - accuracy: 0.5310
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2734 - accuracy: 0.5428
Epoch 24/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2393 - accuracy: 0.5642
Epoch 25/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2411 - accuracy: 0.5635
Epoch 26/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1968 - accuracy: 0.5826
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2365 - accuracy: 0.5726
Epoch 28/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1833 - accuracy: 0.5769
Epoch 29/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1650 - accuracy: 0.5761
Epoch 30/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1609 - accuracy: 0.5922
Epoch 31/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1499 - accuracy: 0.5801
Epoch 32/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1717 - accuracy: 0.5804
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1351 - accuracy: 0.5663
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1119 - accuracy: 0.5895
Epoch 35/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1414 - accuracy: 0.5766
Epoch 36/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1243 - accuracy: 0.5748
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0691 - accuracy: 0.6014
Epoch 38/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0938 - accuracy: 0.5767
Epoch 39/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0591 - accuracy: 0.5950
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0897 - accuracy: 0.5855
Epoch 41/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0617 - accuracy: 0.5805
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0563 - accuracy: 0.5751
Epoch 43/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0718 - accuracy: 0.5957
Epoch 44/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0512 - accuracy: 0.5960
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0116 - accuracy: 0.6108
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0173 - accuracy: 0.5875
Epoch 47/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0368 - accuracy: 0.5908
Epoch 48/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0064 - accuracy: 0.5945
Epoch 49/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9978 - accuracy: 0.5955
Epoch 50/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9988 - accuracy: 0.6111
Epoch 51/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0189 - accuracy: 0.5928
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9689 - accuracy: 0.6440
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9946 - accuracy: 0.6263
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0062 - accuracy: 0.6200
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0023 - accuracy: 0.5940
Epoch 56/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9729 - accuracy: 0.6397
Epoch 57/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9995 - accuracy: 0.6208
Epoch 58/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9775 - accuracy: 0.6322
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0356 - accuracy: 0.6004
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0139 - accuracy: 0.5916
Epoch 61/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0310 - accuracy: 0.6053
Epoch 62/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0087 - accuracy: 0.6147
Epoch 63/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9637 - accuracy: 0.6177
Epoch 64/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9982 - accuracy: 0.6153
Epoch 65/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9880 - accuracy: 0.6143
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9846 - accuracy: 0.6266
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9728 - accuracy: 0.6211
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9979 - accuracy: 0.6074
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0025 - accuracy: 0.6114
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9668 - accuracy: 0.6150
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9623 - accuracy: 0.6268
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9673 - accuracy: 0.6181
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0011 - accuracy: 0.6044
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9812 - accuracy: 0.5985
Epoch 75/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9942 - accuracy: 0.6009
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9940 - accuracy: 0.5936
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9758 - accuracy: 0.6071
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9770 - accuracy: 0.6003
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0015 - accuracy: 0.5953
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9676 - accuracy: 0.6116
Epoch 81/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9271 - accuracy: 0.6157
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0053 - accuracy: 0.6014
Epoch 83/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9773 - accuracy: 0.6219
Epoch 84/100
34/34 [==============================] - 0s 1ms/step - loss: 1.0023 - accuracy: 0.5956
Epoch 85/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9673 - accuracy: 0.6175
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9743 - accuracy: 0.6060
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9905 - accuracy: 0.6131
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0042 - accuracy: 0.6027
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9394 - accuracy: 0.6020
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9257 - accuracy: 0.6390
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9484 - accuracy: 0.6166
Epoch 92/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9929 - accuracy: 0.6021
Epoch 93/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9666 - accuracy: 0.6029
Epoch 94/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9908 - accuracy: 0.5760
Epoch 95/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9556 - accuracy: 0.6061
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9545 - accuracy: 0.6189
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9334 - accuracy: 0.6125
Epoch 98/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9722 - accuracy: 0.5852
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9407 - accuracy: 0.6143
Epoch 100/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9511 - accuracy: 0.6072
In [1348]:
class_model3.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 2ms/step - loss: 0.9641 - accuracy: 0.6041
Out[1348]:
[0.9641204476356506, 0.6041083335876465]

1. Adding Two Hidden Layers to Model

In [1349]:
# Initialize Sequential model
model18 = Sequential()

# Input Layer
model18.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Adding two Hidden layers
model18.add(Dense(6, activation='tanh', kernel_initializer = 'normal'))    # 2nd layer
model18.add(Dense(5, activation='tanh', kernel_initializer = 'normal'))    # 3rd layer

#Output layer
model18.add(Dense(10, activation='softmax', kernel_initializer = 'normal'))

sgd10 = optimizers.SGD(lr = 0.01)
model18.compile(optimizer = sgd10, loss = 'categorical_crossentropy', metrics = ['accuracy'])
In [1350]:
model18.summary()
Model: "sequential_96"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_271 (Dense)            (None, 9)                 135       
_________________________________________________________________
dense_272 (Dense)            (None, 6)                 60        
_________________________________________________________________
dense_273 (Dense)            (None, 5)                 35        
_________________________________________________________________
dense_274 (Dense)            (None, 10)                60        
=================================================================
Total params: 290
Trainable params: 290
Non-trainable params: 0
_________________________________________________________________
In [1351]:
model18.fit(X_train_sd, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 2.2805 - accuracy: 0.4249
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1962 - accuracy: 0.4212
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 2.1159 - accuracy: 0.4428
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 2.0446 - accuracy: 0.4222
Epoch 5/100
34/34 [==============================] - 0s 1ms/step - loss: 1.9743 - accuracy: 0.4263
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9145 - accuracy: 0.4187
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 1.8570 - accuracy: 0.4297
Epoch 8/100
34/34 [==============================] - 0s 1ms/step - loss: 1.8000 - accuracy: 0.4342
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 1.7630 - accuracy: 0.4256
Epoch 10/100
34/34 [==============================] - 0s 1ms/step - loss: 1.7160 - accuracy: 0.4281
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 1.6808 - accuracy: 0.4411
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 1.6596 - accuracy: 0.4173
Epoch 13/100
34/34 [==============================] - 0s 1ms/step - loss: 1.6438 - accuracy: 0.4366
Epoch 14/100
34/34 [==============================] - 0s 1ms/step - loss: 1.5796 - accuracy: 0.4339
Epoch 15/100
34/34 [==============================] - 0s 1ms/step - loss: 1.5729 - accuracy: 0.4166
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 1.5389 - accuracy: 0.4575
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 1.5233 - accuracy: 0.4150
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 1.4894 - accuracy: 0.4392
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 1.4898 - accuracy: 0.3996
Epoch 20/100
34/34 [==============================] - 0s 1ms/step - loss: 1.4775 - accuracy: 0.4364
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 1.4742 - accuracy: 0.4264
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 1.4301 - accuracy: 0.4366
Epoch 23/100
34/34 [==============================] - 0s 1ms/step - loss: 1.4089 - accuracy: 0.4602
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 1.4152 - accuracy: 0.4202
Epoch 25/100
34/34 [==============================] - 0s 1ms/step - loss: 1.4057 - accuracy: 0.4328
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 1.4093 - accuracy: 0.4117
Epoch 27/100
34/34 [==============================] - 0s 1ms/step - loss: 1.3512 - accuracy: 0.4306
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3360 - accuracy: 0.4354
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3700 - accuracy: 0.4154
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3425 - accuracy: 0.4246
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3293 - accuracy: 0.4713
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3401 - accuracy: 0.4287
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3392 - accuracy: 0.4483
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3575 - accuracy: 0.4170
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3297 - accuracy: 0.4306
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3139 - accuracy: 0.4304
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3217 - accuracy: 0.4251
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2955 - accuracy: 0.4824
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3286 - accuracy: 0.4224
Epoch 40/100
34/34 [==============================] - ETA: 0s - loss: 1.2993 - accuracy: 0.43 - 0s 2ms/step - loss: 1.2992 - accuracy: 0.4315
Epoch 41/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2901 - accuracy: 0.4379
Epoch 42/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2801 - accuracy: 0.4346
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2759 - accuracy: 0.4266
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2539 - accuracy: 0.4319
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2902 - accuracy: 0.4197
Epoch 46/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2402 - accuracy: 0.4652
Epoch 47/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2928 - accuracy: 0.4286
Epoch 48/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2823 - accuracy: 0.4372
Epoch 49/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2672 - accuracy: 0.4262
Epoch 50/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2875 - accuracy: 0.4029
Epoch 51/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2765 - accuracy: 0.4489
Epoch 52/100
34/34 [==============================] - 0s 1ms/step - loss: 1.3056 - accuracy: 0.4196
Epoch 53/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2647 - accuracy: 0.4288
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2675 - accuracy: 0.4072
Epoch 55/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2511 - accuracy: 0.4450
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2541 - accuracy: 0.4403
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2499 - accuracy: 0.4414
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2153 - accuracy: 0.4482
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2210 - accuracy: 0.4389
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2533 - accuracy: 0.4363
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2402 - accuracy: 0.4465
Epoch 62/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2624 - accuracy: 0.4224
Epoch 63/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2202 - accuracy: 0.4220
Epoch 64/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2743 - accuracy: 0.4087
Epoch 65/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2648 - accuracy: 0.4186
Epoch 66/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2280 - accuracy: 0.4383
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2336 - accuracy: 0.4378
Epoch 68/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2222 - accuracy: 0.4334
Epoch 69/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2237 - accuracy: 0.4440
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2080 - accuracy: 0.4537
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2289 - accuracy: 0.4135
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2090 - accuracy: 0.4535
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2538 - accuracy: 0.4222
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2340 - accuracy: 0.4439
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2199 - accuracy: 0.4335
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2098 - accuracy: 0.4648
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2365 - accuracy: 0.4329
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2311 - accuracy: 0.4432
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2517 - accuracy: 0.4358
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1944 - accuracy: 0.4134
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2335 - accuracy: 0.4169
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2130 - accuracy: 0.4452
Epoch 83/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2119 - accuracy: 0.4217
Epoch 84/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2247 - accuracy: 0.4429
Epoch 85/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2272 - accuracy: 0.4283
Epoch 86/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2069 - accuracy: 0.4300
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2397 - accuracy: 0.4333
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2537 - accuracy: 0.4318
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1951 - accuracy: 0.4373
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2047 - accuracy: 0.4286
Epoch 91/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2124 - accuracy: 0.4324
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2151 - accuracy: 0.4484
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1989 - accuracy: 0.4386
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2027 - accuracy: 0.4289
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1831 - accuracy: 0.4297
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2410 - accuracy: 0.4173
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1667 - accuracy: 0.4563
Epoch 98/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1916 - accuracy: 0.4179
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1653 - accuracy: 0.4575
Epoch 100/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2269 - accuracy: 0.4151
Out[1351]:
<tensorflow.python.keras.callbacks.History at 0x1a6d3d4a50>
In [1352]:
model18.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 1.2091 - accuracy: 0.4332
Out[1352]:
[1.2090646028518677, 0.43323996663093567]

2. Adding Four Hidden Layers to Model

In [1353]:
# Initialize Sequential model
model19 = Sequential()

# Input Layer
model19.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Adding four Hidden layers
model19.add(Dense(10, activation='sigmoid', kernel_initializer = 'normal'))    # 2nd layer
model19.add(Dense(20, activation='sigmoid', kernel_initializer = 'normal'))    # 3rd layer

# Hidden layers
model19.add(Dense(30, activation='sigmoid', kernel_initializer = 'normal'))     # 4th layer
model19.add(Dense(15, activation='sigmoid', kernel_initializer = 'normal'))     # 5th layer


#Output layer
model19.add(Dense(10, activation='softmax', kernel_initializer = 'normal'))

sgd11 = optimizers.SGD(lr = 0.01)
model19.compile(optimizer = sgd11, loss = 'categorical_crossentropy', metrics = ['accuracy'])
In [1354]:
model19.summary()
Model: "sequential_97"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_275 (Dense)            (None, 9)                 135       
_________________________________________________________________
dense_276 (Dense)            (None, 10)                100       
_________________________________________________________________
dense_277 (Dense)            (None, 20)                220       
_________________________________________________________________
dense_278 (Dense)            (None, 30)                630       
_________________________________________________________________
dense_279 (Dense)            (None, 15)                465       
_________________________________________________________________
dense_280 (Dense)            (None, 10)                160       
=================================================================
Total params: 1,710
Trainable params: 1,710
Non-trainable params: 0
_________________________________________________________________
In [1355]:
model19.fit(X_train_sd, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 2.2522 - accuracy: 0.1862
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 1.9060 - accuracy: 0.4632
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 1.6916 - accuracy: 0.4107
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 1.5180 - accuracy: 0.4209
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 1.4479 - accuracy: 0.4187
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3450 - accuracy: 0.4333
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3471 - accuracy: 0.4370
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3383 - accuracy: 0.4341
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 1.3271 - accuracy: 0.3910
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2887 - accuracy: 0.4349
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2677 - accuracy: 0.4402
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2838 - accuracy: 0.4448
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2274 - accuracy: 0.4384
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2736 - accuracy: 0.4371
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2416 - accuracy: 0.4467
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2276 - accuracy: 0.4274
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2292 - accuracy: 0.4429
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2350 - accuracy: 0.4391
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2410 - accuracy: 0.4338
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2317 - accuracy: 0.4357
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2319 - accuracy: 0.4384
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2556 - accuracy: 0.4276
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2275 - accuracy: 0.4187
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1763 - accuracy: 0.4493
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2325 - accuracy: 0.4343
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1848 - accuracy: 0.4560
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2420 - accuracy: 0.4170
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1853 - accuracy: 0.4174
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2088 - accuracy: 0.4622
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2455 - accuracy: 0.4212
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2103 - accuracy: 0.4086
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1985 - accuracy: 0.4355
Epoch 33/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1743 - accuracy: 0.4516
Epoch 34/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2224 - accuracy: 0.4104
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2158 - accuracy: 0.4154
Epoch 36/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1661 - accuracy: 0.4398
Epoch 37/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1833 - accuracy: 0.4601
Epoch 38/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2517 - accuracy: 0.4165
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2006 - accuracy: 0.4110
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1825 - accuracy: 0.4338
Epoch 41/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2178 - accuracy: 0.4168
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2055 - accuracy: 0.4298
Epoch 43/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1810 - accuracy: 0.4475
Epoch 44/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1992 - accuracy: 0.4729
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1577 - accuracy: 0.4208
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2073 - accuracy: 0.4470
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2222 - accuracy: 0.4306
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1954 - accuracy: 0.4279
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1837 - accuracy: 0.4307
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2152 - accuracy: 0.4313
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1790 - accuracy: 0.4400
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2114 - accuracy: 0.4238
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2232 - accuracy: 0.4299
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1680 - accuracy: 0.4398
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1843 - accuracy: 0.4497
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2084 - accuracy: 0.4131
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1907 - accuracy: 0.4225
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2066 - accuracy: 0.4207
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2111 - accuracy: 0.4298
Epoch 60/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1919 - accuracy: 0.4299
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1597 - accuracy: 0.4585
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1981 - accuracy: 0.4455
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1666 - accuracy: 0.4393
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2249 - accuracy: 0.4308
Epoch 65/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1603 - accuracy: 0.4446
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2018 - accuracy: 0.4244
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1877 - accuracy: 0.4533
Epoch 68/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1985 - accuracy: 0.4430
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1707 - accuracy: 0.4329
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2110 - accuracy: 0.4356
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1780 - accuracy: 0.4331
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1818 - accuracy: 0.4480
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2111 - accuracy: 0.4248
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1902 - accuracy: 0.4375
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1624 - accuracy: 0.4449
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2139 - accuracy: 0.3998
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2001 - accuracy: 0.4329
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1880 - accuracy: 0.4543
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1856 - accuracy: 0.4397
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1987 - accuracy: 0.4192
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1871 - accuracy: 0.4071
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2009 - accuracy: 0.4409
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2100 - accuracy: 0.4206
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2000 - accuracy: 0.4354
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2150 - accuracy: 0.4212
Epoch 86/100
34/34 [==============================] - 0s 1ms/step - loss: 1.1724 - accuracy: 0.4321
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1687 - accuracy: 0.4389
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1730 - accuracy: 0.4161
Epoch 89/100
34/34 [==============================] - 0s 1ms/step - loss: 1.2392 - accuracy: 0.4255
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1688 - accuracy: 0.4432
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1865 - accuracy: 0.4090
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1565 - accuracy: 0.4607
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1834 - accuracy: 0.4416
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2045 - accuracy: 0.4350
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1796 - accuracy: 0.4374
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1553 - accuracy: 0.4313
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1627 - accuracy: 0.4474
Epoch 98/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1942 - accuracy: 0.4172
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2248 - accuracy: 0.4382
Epoch 100/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1695 - accuracy: 0.4377
Out[1355]:
<tensorflow.python.keras.callbacks.History at 0x1a6d53c1d0>
In [1356]:
model19.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 1.1861 - accuracy: 0.4332
Out[1356]:
[1.1861464977264404, 0.43323996663093567]

B. --------------------Adam Optimizer---------------------

In [1357]:
# Adam Neural Network regression model after pca

class_model4 = Sequential()

# Input Layer

class_model4.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Output Layer

class_model4.add(Dense(10, kernel_initializer = 'normal', activation = 'softmax'))

adam9 = optimizers.Adam(lr = 0.01)
class_model4.compile(optimizer = adam9, loss = 'categorical_crossentropy', metrics = ['accuracy'])
In [1358]:
class_model4.summary()
Model: "sequential_98"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_281 (Dense)            (None, 9)                 135       
_________________________________________________________________
dense_282 (Dense)            (None, 10)                100       
=================================================================
Total params: 235
Trainable params: 235
Non-trainable params: 0
_________________________________________________________________
In [1359]:
his4 = class_model4.fit(X_train_sd, y_train, epochs = 100, verbose = 1)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 2.0343 - accuracy: 0.3565
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1265 - accuracy: 0.5799
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9690 - accuracy: 0.5958
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9761 - accuracy: 0.5870
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9181 - accuracy: 0.6428
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9171 - accuracy: 0.6174
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9067 - accuracy: 0.6351
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9158 - accuracy: 0.6030
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9332 - accuracy: 0.6130
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9685 - accuracy: 0.6039
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9191 - accuracy: 0.6185
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9043 - accuracy: 0.6156
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8929 - accuracy: 0.6357
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8890 - accuracy: 0.6327
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8892 - accuracy: 0.6215
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9097 - accuracy: 0.6308
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8843 - accuracy: 0.6283
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8405 - accuracy: 0.6497
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8914 - accuracy: 0.6424
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9151 - accuracy: 0.5995
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8706 - accuracy: 0.6504
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8727 - accuracy: 0.6415
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8684 - accuracy: 0.6407
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8630 - accuracy: 0.6446
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8557 - accuracy: 0.6541
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8710 - accuracy: 0.6609
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8657 - accuracy: 0.6473
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8722 - accuracy: 0.6422
Epoch 29/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8869 - accuracy: 0.6119
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8633 - accuracy: 0.6294
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8521 - accuracy: 0.6581
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9032 - accuracy: 0.6078
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8771 - accuracy: 0.6197
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8571 - accuracy: 0.6480
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8703 - accuracy: 0.6205
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8410 - accuracy: 0.6595
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8369 - accuracy: 0.6471
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8517 - accuracy: 0.6543
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8811 - accuracy: 0.6309
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8187 - accuracy: 0.6470
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8389 - accuracy: 0.6301
Epoch 42/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8666 - accuracy: 0.6217
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8517 - accuracy: 0.6294
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8401 - accuracy: 0.6583
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8306 - accuracy: 0.6742
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8700 - accuracy: 0.6522
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8356 - accuracy: 0.6405
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8316 - accuracy: 0.6508
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8510 - accuracy: 0.6498
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8171 - accuracy: 0.6634
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8295 - accuracy: 0.6645
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8592 - accuracy: 0.6345
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8745 - accuracy: 0.6416
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8457 - accuracy: 0.6547
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8489 - accuracy: 0.6265
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8031 - accuracy: 0.6561
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8221 - accuracy: 0.6540
Epoch 58/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8035 - accuracy: 0.6797
Epoch 59/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8265 - accuracy: 0.6595
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8495 - accuracy: 0.6494
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8607 - accuracy: 0.6327
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8447 - accuracy: 0.6239
Epoch 63/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8712 - accuracy: 0.6297
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8538 - accuracy: 0.6513
Epoch 65/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8339 - accuracy: 0.6521
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8412 - accuracy: 0.6403
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8140 - accuracy: 0.6699
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8134 - accuracy: 0.6615
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8390 - accuracy: 0.6633
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8631 - accuracy: 0.6404
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8479 - accuracy: 0.6535
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8190 - accuracy: 0.6538
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8354 - accuracy: 0.6566
Epoch 74/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8110 - accuracy: 0.6482
Epoch 75/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8344 - accuracy: 0.6425
Epoch 76/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8483 - accuracy: 0.6319
Epoch 77/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8073 - accuracy: 0.6683
Epoch 78/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8714 - accuracy: 0.6221
Epoch 79/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8771 - accuracy: 0.6333
Epoch 80/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8340 - accuracy: 0.6520
Epoch 81/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8224 - accuracy: 0.6528
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7984 - accuracy: 0.6770
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7998 - accuracy: 0.6520
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8188 - accuracy: 0.6684
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7899 - accuracy: 0.6575
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8127 - accuracy: 0.6521
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8038 - accuracy: 0.6620
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8617 - accuracy: 0.6280
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8076 - accuracy: 0.6464
Epoch 90/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8068 - accuracy: 0.6545
Epoch 91/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8613 - accuracy: 0.6438
Epoch 92/100
34/34 [==============================] - 0s 1ms/step - loss: 0.7902 - accuracy: 0.6604
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8167 - accuracy: 0.6546
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8129 - accuracy: 0.6522
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7966 - accuracy: 0.6463
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8478 - accuracy: 0.6352
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8205 - accuracy: 0.6644
Epoch 98/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8519 - accuracy: 0.6245
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8271 - accuracy: 0.6564
Epoch 100/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8496 - accuracy: 0.6173
In [1360]:
class_model4.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 2ms/step - loss: 0.8262 - accuracy: 0.6489
Out[1360]:
[0.8261608481407166, 0.6489262580871582]

1. Adding Two Hidden Layers to Model

In [1365]:
# Initialize Sequential model
model20 = Sequential()

# Input Layer
model20.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Adding two Hidden layers
model20.add(Dense(16, activation ='elu', kernel_initializer = 'normal'))    # 2nd layer
model20.add(Dense(8, activation ='elu', kernel_initializer = 'normal'))    # 3rd layer

#Output layer
model20.add(Dense(10, activation='softmax', kernel_initializer = 'normal'))

adam10 = optimizers.Adam(lr = 0.01)
model20.compile(optimizer = adam10, loss = 'categorical_crossentropy', metrics = ['accuracy'])
In [1366]:
model20.summary()
Model: "sequential_100"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_287 (Dense)            (None, 9)                 135       
_________________________________________________________________
dense_288 (Dense)            (None, 16)                160       
_________________________________________________________________
dense_289 (Dense)            (None, 8)                 136       
_________________________________________________________________
dense_290 (Dense)            (None, 10)                90        
=================================================================
Total params: 521
Trainable params: 521
Non-trainable params: 0
_________________________________________________________________
In [1367]:
model20.fit(X_train_sd, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 1.9833 - accuracy: 0.4341
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1927 - accuracy: 0.3818
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1625 - accuracy: 0.4545
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1093 - accuracy: 0.5726
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0320 - accuracy: 0.5839
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9993 - accuracy: 0.5951
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8984 - accuracy: 0.6134
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9074 - accuracy: 0.6206
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9399 - accuracy: 0.6367
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8916 - accuracy: 0.6325
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9272 - accuracy: 0.6394
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8909 - accuracy: 0.6424
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9025 - accuracy: 0.6221
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8907 - accuracy: 0.6311
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8829 - accuracy: 0.6198
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8976 - accuracy: 0.6386
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8837 - accuracy: 0.6458
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8641 - accuracy: 0.6684
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8617 - accuracy: 0.6503
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8937 - accuracy: 0.6232
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8480 - accuracy: 0.6508
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8264 - accuracy: 0.6646
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8504 - accuracy: 0.6561
Epoch 24/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8527 - accuracy: 0.6502
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8280 - accuracy: 0.6663
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8449 - accuracy: 0.6447
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8296 - accuracy: 0.6585
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8853 - accuracy: 0.6444
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8086 - accuracy: 0.6706
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8427 - accuracy: 0.6536
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8282 - accuracy: 0.6598
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8904 - accuracy: 0.6312
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8319 - accuracy: 0.6550
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8046 - accuracy: 0.6686
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8475 - accuracy: 0.6574
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8076 - accuracy: 0.6786
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8337 - accuracy: 0.6436
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8333 - accuracy: 0.6572
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8298 - accuracy: 0.6424
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7954 - accuracy: 0.6684
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8254 - accuracy: 0.6683
Epoch 42/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8194 - accuracy: 0.6738
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8206 - accuracy: 0.6690
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8222 - accuracy: 0.6609
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8301 - accuracy: 0.6595
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8061 - accuracy: 0.6521
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8066 - accuracy: 0.6796
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8093 - accuracy: 0.6763
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7819 - accuracy: 0.6883
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7497 - accuracy: 0.6973
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7976 - accuracy: 0.6613
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8120 - accuracy: 0.6748
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7930 - accuracy: 0.6852
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7794 - accuracy: 0.6826
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7822 - accuracy: 0.6752
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7901 - accuracy: 0.6681
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7764 - accuracy: 0.6776
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7794 - accuracy: 0.6708
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7671 - accuracy: 0.6773
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7532 - accuracy: 0.6797
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8036 - accuracy: 0.7045
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7867 - accuracy: 0.6734
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7541 - accuracy: 0.7049
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7287 - accuracy: 0.7069
Epoch 65/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7514 - accuracy: 0.6783
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7600 - accuracy: 0.6879
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7868 - accuracy: 0.6686
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7595 - accuracy: 0.6688
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7427 - accuracy: 0.6721
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7228 - accuracy: 0.7209
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7374 - accuracy: 0.6926
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7193 - accuracy: 0.7232
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7822 - accuracy: 0.6612
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7376 - accuracy: 0.7010
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7430 - accuracy: 0.6887
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7672 - accuracy: 0.6764
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7783 - accuracy: 0.6801
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7235 - accuracy: 0.6776
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7234 - accuracy: 0.6811
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7307 - accuracy: 0.6949
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7412 - accuracy: 0.6740
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7396 - accuracy: 0.6777
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7063 - accuracy: 0.6909
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6900 - accuracy: 0.7333
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7074 - accuracy: 0.6878
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7101 - accuracy: 0.7128
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7425 - accuracy: 0.7067
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7017 - accuracy: 0.6873
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7098 - accuracy: 0.7084
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7247 - accuracy: 0.6975
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6725 - accuracy: 0.7323
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6830 - accuracy: 0.7191
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7054 - accuracy: 0.7001
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7102 - accuracy: 0.7089
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6701 - accuracy: 0.7194
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7170 - accuracy: 0.6765
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6561 - accuracy: 0.7245
Epoch 98/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6915 - accuracy: 0.7281
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6478 - accuracy: 0.7293
Epoch 100/100
34/34 [==============================] - 0s 2ms/step - loss: 0.6593 - accuracy: 0.7225
Out[1367]:
<tensorflow.python.keras.callbacks.History at 0x1a6dc4f4d0>
In [1368]:
model20.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 0.6423 - accuracy: 0.7404
Out[1368]:
[0.642341673374176, 0.7404295206069946]

2. Adding Four Hidden Layers to Model

In [1369]:
# Initialize Sequential model
model21 = Sequential()

# Input Layer
model21.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Adding four Hidden layers
model21.add(Dense(45, activation='tanh', kernel_initializer = 'normal'))    # 2nd layer
model21.add(Dense(30, activation='tanh', kernel_initializer = 'normal'))     # 3rd layer

model21.add(Dense(10, activation='tanh', kernel_initializer = 'normal'))     # 4th layer
model21.add(Dense(50, activation='tanh', kernel_initializer = 'normal'))     # 5th layer


#Output layer
model21.add(Dense(10, activation='softmax', kernel_initializer = 'normal'))

adam11 = optimizers.Adam(lr = 0.01)
model21.compile(optimizer = adam11, loss = 'categorical_crossentropy', metrics = ['accuracy'])
In [1370]:
model21.summary()
Model: "sequential_101"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_291 (Dense)            (None, 9)                 135       
_________________________________________________________________
dense_292 (Dense)            (None, 45)                450       
_________________________________________________________________
dense_293 (Dense)            (None, 30)                1380      
_________________________________________________________________
dense_294 (Dense)            (None, 10)                310       
_________________________________________________________________
dense_295 (Dense)            (None, 50)                550       
_________________________________________________________________
dense_296 (Dense)            (None, 10)                510       
=================================================================
Total params: 3,335
Trainable params: 3,335
Non-trainable params: 0
_________________________________________________________________
In [1371]:
model21.fit(X_train_sd, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 1.7267 - accuracy: 0.3685
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2127 - accuracy: 0.4416
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1844 - accuracy: 0.4393
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2019 - accuracy: 0.3949
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2146 - accuracy: 0.4129
Epoch 6/100
34/34 [==============================] - 0s 3ms/step - loss: 1.2061 - accuracy: 0.3992
Epoch 7/100
34/34 [==============================] - 0s 3ms/step - loss: 1.2192 - accuracy: 0.4161
Epoch 8/100
34/34 [==============================] - 0s 3ms/step - loss: 1.2185 - accuracy: 0.4271
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1713 - accuracy: 0.4262
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1778 - accuracy: 0.4025
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2010 - accuracy: 0.4026
Epoch 12/100
34/34 [==============================] - 0s 3ms/step - loss: 1.1721 - accuracy: 0.4326
Epoch 13/100
34/34 [==============================] - 0s 4ms/step - loss: 1.1536 - accuracy: 0.4645
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1886 - accuracy: 0.4265
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1723 - accuracy: 0.4398
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1238 - accuracy: 0.5436
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0691 - accuracy: 0.5876
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9984 - accuracy: 0.6156
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0138 - accuracy: 0.6192
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0141 - accuracy: 0.5953
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9913 - accuracy: 0.6070
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0271 - accuracy: 0.5717
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0205 - accuracy: 0.5724
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9448 - accuracy: 0.6249
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0069 - accuracy: 0.5790
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9837 - accuracy: 0.5844
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9459 - accuracy: 0.6069
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9123 - accuracy: 0.6263
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9589 - accuracy: 0.6088
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9341 - accuracy: 0.6165
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9086 - accuracy: 0.6272
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9372 - accuracy: 0.6438
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9540 - accuracy: 0.6171
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9475 - accuracy: 0.6162
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9786 - accuracy: 0.5943
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9817 - accuracy: 0.5952
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9785 - accuracy: 0.6016
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9837 - accuracy: 0.6106
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9360 - accuracy: 0.6392
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9475 - accuracy: 0.6241
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9722 - accuracy: 0.6106
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9495 - accuracy: 0.6296
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9513 - accuracy: 0.5801
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9179 - accuracy: 0.6278
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9197 - accuracy: 0.6385
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9610 - accuracy: 0.6214
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9269 - accuracy: 0.6188
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9212 - accuracy: 0.5993
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9727 - accuracy: 0.6039
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9577 - accuracy: 0.6150
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9425 - accuracy: 0.6267
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9545 - accuracy: 0.6128
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9326 - accuracy: 0.6061
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9236 - accuracy: 0.6303
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9038 - accuracy: 0.6304
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9008 - accuracy: 0.6399
Epoch 57/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8989 - accuracy: 0.6207
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9570 - accuracy: 0.6027
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9208 - accuracy: 0.6382
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9251 - accuracy: 0.6435
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9272 - accuracy: 0.6529
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9469 - accuracy: 0.6342
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9665 - accuracy: 0.6181
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8761 - accuracy: 0.6609
Epoch 65/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9115 - accuracy: 0.6314
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9070 - accuracy: 0.6347
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8538 - accuracy: 0.6496
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8579 - accuracy: 0.6611
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8968 - accuracy: 0.6329
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8999 - accuracy: 0.6323
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9055 - accuracy: 0.6295
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9042 - accuracy: 0.6406
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9136 - accuracy: 0.6258
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8495 - accuracy: 0.6590
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8843 - accuracy: 0.6358
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9066 - accuracy: 0.6265
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9107 - accuracy: 0.6389
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9291 - accuracy: 0.6371
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8721 - accuracy: 0.6752
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8827 - accuracy: 0.6755
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8829 - accuracy: 0.6452
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8513 - accuracy: 0.6571
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8408 - accuracy: 0.6859
Epoch 84/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8646 - accuracy: 0.6628
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8949 - accuracy: 0.6514
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9082 - accuracy: 0.6296
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8853 - accuracy: 0.6460
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9510 - accuracy: 0.6335
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9504 - accuracy: 0.6474
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8813 - accuracy: 0.6540
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9075 - accuracy: 0.6408
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9329 - accuracy: 0.6374
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8843 - accuracy: 0.6491
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8750 - accuracy: 0.6577
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8648 - accuracy: 0.6670
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8633 - accuracy: 0.6837
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8905 - accuracy: 0.6537
Epoch 98/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9339 - accuracy: 0.6447
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8721 - accuracy: 0.6523
Epoch 100/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8738 - accuracy: 0.6673
Out[1371]:
<tensorflow.python.keras.callbacks.History at 0x1a6dc001d0>
In [1372]:
model21.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 0.8663 - accuracy: 0.6676
Out[1372]:
[0.8662595152854919, 0.6676003932952881]

C. --------------------RMSprop Optimizer---------------------

In [1373]:
# Rmsprop Neural Network regression model after pca

class_model5 = Sequential()

# Input Layer

class_model5.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Output Layer

class_model5.add(Dense(10, kernel_initializer = 'normal', activation = 'softmax'))

rms9 = optimizers.RMSprop(lr = 0.01)
class_model5.compile(optimizer = rms9, loss = 'categorical_crossentropy', metrics = ['accuracy'])
In [1374]:
class_model5.summary()
Model: "sequential_102"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_297 (Dense)            (None, 9)                 135       
_________________________________________________________________
dense_298 (Dense)            (None, 10)                100       
=================================================================
Total params: 235
Trainable params: 235
Non-trainable params: 0
_________________________________________________________________
In [1375]:
his5 = class_model5.fit(X_train_sd, y_train, epochs = 100, verbose = 1)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 1.6749 - accuracy: 0.5084
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9938 - accuracy: 0.6109
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0101 - accuracy: 0.5645
Epoch 4/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9579 - accuracy: 0.6115
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9636 - accuracy: 0.6140
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9395 - accuracy: 0.6152
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9328 - accuracy: 0.6297
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9444 - accuracy: 0.6185
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9315 - accuracy: 0.6348
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8917 - accuracy: 0.6345
Epoch 11/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9049 - accuracy: 0.6336
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9533 - accuracy: 0.6159
Epoch 13/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8853 - accuracy: 0.6287
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9066 - accuracy: 0.6120
Epoch 15/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9238 - accuracy: 0.6107
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8679 - accuracy: 0.6406
Epoch 17/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8767 - accuracy: 0.6396
Epoch 18/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8773 - accuracy: 0.6306
Epoch 19/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8638 - accuracy: 0.6586
Epoch 20/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9217 - accuracy: 0.5913
Epoch 21/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9263 - accuracy: 0.6230
Epoch 22/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9182 - accuracy: 0.6230
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8632 - accuracy: 0.6336
Epoch 24/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9023 - accuracy: 0.6038
Epoch 25/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8911 - accuracy: 0.6105
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8661 - accuracy: 0.6496
Epoch 27/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8660 - accuracy: 0.6376
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8521 - accuracy: 0.6491
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9079 - accuracy: 0.6384
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8560 - accuracy: 0.6210
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8684 - accuracy: 0.6354
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8506 - accuracy: 0.6434
Epoch 33/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8408 - accuracy: 0.6513
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8802 - accuracy: 0.6386
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8696 - accuracy: 0.6401
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8997 - accuracy: 0.6336
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8022 - accuracy: 0.6681
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8689 - accuracy: 0.6133
Epoch 39/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8526 - accuracy: 0.6166
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8673 - accuracy: 0.6296
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8494 - accuracy: 0.6279
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8605 - accuracy: 0.6308
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8497 - accuracy: 0.6398
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8171 - accuracy: 0.6609
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8515 - accuracy: 0.6289
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8649 - accuracy: 0.6387
Epoch 47/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8518 - accuracy: 0.6522
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8445 - accuracy: 0.6552
Epoch 49/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8418 - accuracy: 0.6552
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8374 - accuracy: 0.6665
Epoch 51/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8797 - accuracy: 0.6201
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8186 - accuracy: 0.6693
Epoch 53/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8611 - accuracy: 0.6321
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8534 - accuracy: 0.6586
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8308 - accuracy: 0.6612
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8447 - accuracy: 0.6454
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8119 - accuracy: 0.6522
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8596 - accuracy: 0.6189
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8402 - accuracy: 0.6324
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8450 - accuracy: 0.6537
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8211 - accuracy: 0.6666
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8823 - accuracy: 0.6222
Epoch 63/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8078 - accuracy: 0.6549
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8430 - accuracy: 0.6204
Epoch 65/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8242 - accuracy: 0.6432
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8226 - accuracy: 0.6543
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8309 - accuracy: 0.6643
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8566 - accuracy: 0.6593
Epoch 69/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8356 - accuracy: 0.6402
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8799 - accuracy: 0.6238
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8276 - accuracy: 0.6703
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8473 - accuracy: 0.6377
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8217 - accuracy: 0.6727
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7985 - accuracy: 0.6779
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8306 - accuracy: 0.6351
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8214 - accuracy: 0.6456
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8600 - accuracy: 0.6491
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8077 - accuracy: 0.6624
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8327 - accuracy: 0.6280
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8169 - accuracy: 0.6621
Epoch 81/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8347 - accuracy: 0.6461
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8247 - accuracy: 0.5952
Epoch 83/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8206 - accuracy: 0.6684
Epoch 84/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8018 - accuracy: 0.6648
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8573 - accuracy: 0.6102
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8206 - accuracy: 0.6453
Epoch 87/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8059 - accuracy: 0.6490
Epoch 88/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8316 - accuracy: 0.6515
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8094 - accuracy: 0.6564
Epoch 90/100
34/34 [==============================] - 0s 1ms/step - loss: 0.7848 - accuracy: 0.6709
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7696 - accuracy: 0.6897
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8385 - accuracy: 0.6255
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8301 - accuracy: 0.6639
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8483 - accuracy: 0.6675
Epoch 95/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8465 - accuracy: 0.6464
Epoch 96/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8208 - accuracy: 0.6582
Epoch 97/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8372 - accuracy: 0.6376
Epoch 98/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8189 - accuracy: 0.6476
Epoch 99/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8435 - accuracy: 0.6328
Epoch 100/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8219 - accuracy: 0.6609
In [1376]:
class_model5.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 0.8202 - accuracy: 0.6452
Out[1376]:
[0.8201697468757629, 0.6451914310455322]

1. Adding Two Hidden Layers to Model

In [1377]:
# Initialize Sequential model
model22 = Sequential()

# Input Layer
model22.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Adding two Hidden layers
model22.add(Dense(35, activation='sigmoid', kernel_initializer = 'normal'))    # 2nd layer
model22.add(Dense(15, activation='sigmoid', kernel_initializer = 'normal'))    # 3rd layer

#Output layer
model22.add(Dense(10, activation='softmax', kernel_initializer = 'normal'))

rms10 = optimizers.RMSprop(lr = 0.01)
model22.compile(optimizer = rms10, loss = 'categorical_crossentropy', metrics = ['accuracy'])
In [1378]:
model22.summary()
Model: "sequential_103"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_299 (Dense)            (None, 9)                 135       
_________________________________________________________________
dense_300 (Dense)            (None, 35)                350       
_________________________________________________________________
dense_301 (Dense)            (None, 15)                540       
_________________________________________________________________
dense_302 (Dense)            (None, 10)                160       
=================================================================
Total params: 1,185
Trainable params: 1,185
Non-trainable params: 0
_________________________________________________________________
In [1379]:
model22.fit(X_train_sd, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 1.4928 - accuracy: 0.4246
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1511 - accuracy: 0.4537
Epoch 3/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1861 - accuracy: 0.3843
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1576 - accuracy: 0.4943
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 1.1209 - accuracy: 0.5452
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0696 - accuracy: 0.5927
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0415 - accuracy: 0.5945
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9794 - accuracy: 0.6070
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9666 - accuracy: 0.6224
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9696 - accuracy: 0.6149
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9752 - accuracy: 0.5915
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9820 - accuracy: 0.6181
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9673 - accuracy: 0.6062
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9868 - accuracy: 0.6071
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9544 - accuracy: 0.6186
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9786 - accuracy: 0.5964
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9557 - accuracy: 0.6192
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9489 - accuracy: 0.6059
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9028 - accuracy: 0.6449
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9229 - accuracy: 0.6178
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9571 - accuracy: 0.6251
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9126 - accuracy: 0.6267
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9001 - accuracy: 0.6349
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9330 - accuracy: 0.6051
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9543 - accuracy: 0.6003
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9218 - accuracy: 0.6132
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9599 - accuracy: 0.5940
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9331 - accuracy: 0.6141
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9335 - accuracy: 0.6268
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9219 - accuracy: 0.6237
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9125 - accuracy: 0.6263
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9289 - accuracy: 0.6011
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9041 - accuracy: 0.6267
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9021 - accuracy: 0.6212
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9018 - accuracy: 0.6507
Epoch 36/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9391 - accuracy: 0.6109
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9225 - accuracy: 0.6070
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8956 - accuracy: 0.6271
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9257 - accuracy: 0.6269
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9605 - accuracy: 0.5927
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8975 - accuracy: 0.6204
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9011 - accuracy: 0.6252
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9000 - accuracy: 0.6419
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8501 - accuracy: 0.6547
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8784 - accuracy: 0.6293
Epoch 46/100
34/34 [==============================] - 0s 1ms/step - loss: 0.9025 - accuracy: 0.6320
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8610 - accuracy: 0.6637
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8473 - accuracy: 0.6624
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8831 - accuracy: 0.6477
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9129 - accuracy: 0.6326
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8831 - accuracy: 0.6315
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9462 - accuracy: 0.6320
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8575 - accuracy: 0.6656
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8676 - accuracy: 0.6568
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8735 - accuracy: 0.6480
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8775 - accuracy: 0.6436
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8753 - accuracy: 0.6649
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8756 - accuracy: 0.6164
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8636 - accuracy: 0.6617
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8817 - accuracy: 0.6551
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8831 - accuracy: 0.6425
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8828 - accuracy: 0.6371
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8413 - accuracy: 0.6598
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8616 - accuracy: 0.6515
Epoch 65/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8767 - accuracy: 0.6213
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8837 - accuracy: 0.6384
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8712 - accuracy: 0.6473
Epoch 68/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8334 - accuracy: 0.6748
Epoch 69/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8581 - accuracy: 0.6592
Epoch 70/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8867 - accuracy: 0.6378
Epoch 71/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8690 - accuracy: 0.6515
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8388 - accuracy: 0.6627
Epoch 73/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8143 - accuracy: 0.6589
Epoch 74/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8668 - accuracy: 0.6420
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8342 - accuracy: 0.6638
Epoch 76/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8752 - accuracy: 0.6243
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8279 - accuracy: 0.6645
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8257 - accuracy: 0.6617
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8295 - accuracy: 0.6565
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7923 - accuracy: 0.6598
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8556 - accuracy: 0.6852
Epoch 82/100
34/34 [==============================] - 0s 1ms/step - loss: 0.8045 - accuracy: 0.6581
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8038 - accuracy: 0.6675
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7941 - accuracy: 0.6632
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8100 - accuracy: 0.6754
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8401 - accuracy: 0.6579
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7838 - accuracy: 0.6925
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7873 - accuracy: 0.6794
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8632 - accuracy: 0.6556
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8238 - accuracy: 0.6638
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7850 - accuracy: 0.6895
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8295 - accuracy: 0.6506
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8094 - accuracy: 0.6748
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8230 - accuracy: 0.6636
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8221 - accuracy: 0.6744
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8352 - accuracy: 0.6541
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8406 - accuracy: 0.6706
Epoch 98/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7840 - accuracy: 0.6658
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7660 - accuracy: 0.6768
Epoch 100/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8022 - accuracy: 0.6691
Out[1379]:
<tensorflow.python.keras.callbacks.History at 0x1a6e12fb50>
In [1380]:
model22.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 0.7830 - accuracy: 0.6872
Out[1380]:
[0.7830007672309875, 0.6872082352638245]

2. Adding Four Hidden Layers to Model

In [1381]:
# Initialize Sequential model
model23 = Sequential()

# Input Layer
model23.add(Dense (9, input_dim = 14, kernel_initializer = 'normal', activation = 'relu'))

# Adding four Hidden layers
model23.add(Dense(20, activation='sigmoid', kernel_initializer = 'normal'))    # 2nd layer
model23.add(Dense(55, activation='sigmoid', kernel_initializer = 'normal'))    # 3rd layer

model23.add(Dense(70, activation='sigmoid', kernel_initializer = 'normal'))     # 4th layer
model23.add(Dense(50, activation='sigmoid', kernel_initializer = 'normal'))     # 5th layer


#Output layer
model23.add(Dense(10, activation='softmax', kernel_initializer = 'normal'))

rms11 = optimizers.RMSprop(lr = 0.01)
model23.compile(optimizer = rms11, loss = 'categorical_crossentropy', metrics = ['accuracy'])
In [1382]:
model23.summary()
Model: "sequential_104"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_303 (Dense)            (None, 9)                 135       
_________________________________________________________________
dense_304 (Dense)            (None, 20)                200       
_________________________________________________________________
dense_305 (Dense)            (None, 55)                1155      
_________________________________________________________________
dense_306 (Dense)            (None, 70)                3920      
_________________________________________________________________
dense_307 (Dense)            (None, 50)                3550      
_________________________________________________________________
dense_308 (Dense)            (None, 10)                510       
=================================================================
Total params: 9,470
Trainable params: 9,470
Non-trainable params: 0
_________________________________________________________________
In [1383]:
model23.fit(X_train_sd, y_train, epochs = 100)
Epoch 1/100
34/34 [==============================] - 1s 2ms/step - loss: 1.3798 - accuracy: 0.3838
Epoch 2/100
34/34 [==============================] - 0s 2ms/step - loss: 1.2471 - accuracy: 0.4155
Epoch 3/100
34/34 [==============================] - 0s 3ms/step - loss: 1.0884 - accuracy: 0.5071
Epoch 4/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0100 - accuracy: 0.5978
Epoch 5/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0266 - accuracy: 0.5888
Epoch 6/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9951 - accuracy: 0.5938
Epoch 7/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0187 - accuracy: 0.5800
Epoch 8/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9742 - accuracy: 0.5994
Epoch 9/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9689 - accuracy: 0.6089
Epoch 10/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9868 - accuracy: 0.5836
Epoch 11/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9622 - accuracy: 0.6032
Epoch 12/100
34/34 [==============================] - 0s 2ms/step - loss: 1.0231 - accuracy: 0.5894
Epoch 13/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9882 - accuracy: 0.5932
Epoch 14/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9767 - accuracy: 0.6099
Epoch 15/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9622 - accuracy: 0.6017
Epoch 16/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9720 - accuracy: 0.6149
Epoch 17/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9363 - accuracy: 0.6237
Epoch 18/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9713 - accuracy: 0.5926
Epoch 19/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9506 - accuracy: 0.6017
Epoch 20/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9345 - accuracy: 0.6173
Epoch 21/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9285 - accuracy: 0.6458
Epoch 22/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9276 - accuracy: 0.6270
Epoch 23/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9338 - accuracy: 0.6170
Epoch 24/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9447 - accuracy: 0.6287
Epoch 25/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9471 - accuracy: 0.6033
Epoch 26/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9889 - accuracy: 0.5939
Epoch 27/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9333 - accuracy: 0.6394
Epoch 28/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8968 - accuracy: 0.6408
Epoch 29/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9186 - accuracy: 0.6348
Epoch 30/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9292 - accuracy: 0.6265
Epoch 31/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9195 - accuracy: 0.6254
Epoch 32/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9136 - accuracy: 0.6269
Epoch 33/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9158 - accuracy: 0.6432
Epoch 34/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8641 - accuracy: 0.6816
Epoch 35/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9246 - accuracy: 0.6158
Epoch 36/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8934 - accuracy: 0.6422
Epoch 37/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9377 - accuracy: 0.6187
Epoch 38/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8872 - accuracy: 0.6374
Epoch 39/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8733 - accuracy: 0.6503
Epoch 40/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9108 - accuracy: 0.6132
Epoch 41/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9146 - accuracy: 0.6318
Epoch 42/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9046 - accuracy: 0.6574
Epoch 43/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8749 - accuracy: 0.6568
Epoch 44/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8808 - accuracy: 0.6405
Epoch 45/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8935 - accuracy: 0.6541
Epoch 46/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8705 - accuracy: 0.6562
Epoch 47/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9363 - accuracy: 0.6326
Epoch 48/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8877 - accuracy: 0.6337
Epoch 49/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8831 - accuracy: 0.6572
Epoch 50/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8817 - accuracy: 0.6297
Epoch 51/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8676 - accuracy: 0.6610
Epoch 52/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9091 - accuracy: 0.6408
Epoch 53/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8895 - accuracy: 0.6363
Epoch 54/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9015 - accuracy: 0.6497
Epoch 55/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8712 - accuracy: 0.6511
Epoch 56/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8704 - accuracy: 0.6484
Epoch 57/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8922 - accuracy: 0.6415
Epoch 58/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8613 - accuracy: 0.6541
Epoch 59/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8801 - accuracy: 0.6514
Epoch 60/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8744 - accuracy: 0.6399
Epoch 61/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8911 - accuracy: 0.6417
Epoch 62/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9202 - accuracy: 0.6446
Epoch 63/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8671 - accuracy: 0.6760
Epoch 64/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8977 - accuracy: 0.6639
Epoch 65/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8955 - accuracy: 0.6534
Epoch 66/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8720 - accuracy: 0.6605
Epoch 67/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8865 - accuracy: 0.6415
Epoch 68/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8926 - accuracy: 0.6627
Epoch 69/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8527 - accuracy: 0.6564
Epoch 70/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8579 - accuracy: 0.6633
Epoch 71/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9037 - accuracy: 0.6537
Epoch 72/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9029 - accuracy: 0.6237
Epoch 73/100
34/34 [==============================] - 0s 3ms/step - loss: 0.9065 - accuracy: 0.6338
Epoch 74/100
34/34 [==============================] - 0s 3ms/step - loss: 0.8551 - accuracy: 0.6693
Epoch 75/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8424 - accuracy: 0.6671
Epoch 76/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8017 - accuracy: 0.6876
Epoch 77/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8468 - accuracy: 0.6786
Epoch 78/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9150 - accuracy: 0.6543
Epoch 79/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8657 - accuracy: 0.6617
Epoch 80/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8274 - accuracy: 0.6740
Epoch 81/100
34/34 [==============================] - 0s 2ms/step - loss: 0.9073 - accuracy: 0.6419
Epoch 82/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8715 - accuracy: 0.6528
Epoch 83/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8338 - accuracy: 0.6846
Epoch 84/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8599 - accuracy: 0.6570
Epoch 85/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8456 - accuracy: 0.6656
Epoch 86/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8328 - accuracy: 0.6919
Epoch 87/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8483 - accuracy: 0.6678
Epoch 88/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8545 - accuracy: 0.6757
Epoch 89/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8858 - accuracy: 0.6510
Epoch 90/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8342 - accuracy: 0.6897
Epoch 91/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8307 - accuracy: 0.6673
Epoch 92/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8411 - accuracy: 0.6751
Epoch 93/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7698 - accuracy: 0.7004
Epoch 94/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8234 - accuracy: 0.6744
Epoch 95/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8078 - accuracy: 0.6900
Epoch 96/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7939 - accuracy: 0.6935
Epoch 97/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8176 - accuracy: 0.6754
Epoch 98/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8126 - accuracy: 0.6918
Epoch 99/100
34/34 [==============================] - 0s 2ms/step - loss: 0.8065 - accuracy: 0.6897
Epoch 100/100
34/34 [==============================] - 0s 2ms/step - loss: 0.7657 - accuracy: 0.6917
Out[1383]:
<tensorflow.python.keras.callbacks.History at 0x1a6e3444d0>
In [1384]:
model23.evaluate(X_train_sd, y_train)
34/34 [==============================] - 0s 1ms/step - loss: 0.8274 - accuracy: 0.6713
Out[1384]:
[0.8274499177932739, 0.6713352203369141]

Observation: Our best scores for regression & classification are highligted below.

// Model Scores Regression :-

====> Regression Before PCA :

  1. SGD - loss: 2.0075 - mean_absolute_error: 1.2159

    SGD Two Hidden Layer - loss: 2.0075 - mean_absolute_error: 1.2159

    SGD Four Hidden Layer - loss: 2.0075 - mean_absolute_error: 1.2159

  1. Adam - loss: 2.0075 - mean_absolute_error: 1.2159

    Adam Two Hidden Layer - loss: 2.0075 - mean_absolute_error: 1.2159

    Adam Four Hidden Layer - loss: 2.0075 - mean_absolute_error: 1.2159

  1. RMSprop - loss: 2.0075 - mean_absolute_error: 1.2159

    RMSprop Two Hidden Layer - loss: 2.0075 - mean_absolute_error: 1.2159

    RMSprop Four Hidden Layer - loss: 2.0075 - mean_absolute_error: 1.2159

====> Regression After PCA :

  1. SGD - loss: 2.0075 - mean_absolute_error: 1.2159

    SGD Two Hidden Layer - loss: 2.0075 - mean_absolute_error: 1.2159

    SGD Four Hidden Layer - loss: 2.0075 - mean_absolute_error: 1.2159

  1. Adam - loss: 2.0075 - mean_absolute_error: 1.2159

    Adam Two Hidden Layer - loss: 2.0075 - mean_absolute_error: 1.2159

    Adam Four Hidden Layer - loss: 2.0075 - mean_absolute_error: 1.2159

  1. RMSprop - loss: 2.0075 - mean_absolute_error: 1.2159

    RMSprop Two Hidden Layer - loss: 2.0075 - mean_absolute_error: 1.2159

    RMSprop Four Hidden Layer - loss: 2.0075 - mean_absolute_error: 1.2159

// Model Scores Classification :-

====> Classification Before PCA :

  1. SGD - loss: 0.9575 - accuracy: 0.6162

    SGD Two Hidden Layer - loss: 1.2038 - accuracy: 0.4332

    SGD Four Hidden Layer - loss: 1.1859 - accuracy: 0.4332

  1. Adam - loss: 0.8157 - accuracy: 0.6583

    Adam Two Hidden Layer - loss: 0.5889 - accuracy: 0.7479

    Adam Four Hidden Layer - loss: 0.8692 - accuracy: 0.6218

  1. RMSprop - loss: 0.8127 - accuracy: 0.6676

    RMSprop Two Hidden Layer - loss: 0.7937 - accuracy: 0.6863

    RMSprop Four Hidden Layer - loss: 0.7640 - accuracy: 0.7171

====> Classification After PCA :

  1. SGD - loss: 0.9641 - accuracy: 0.6041

    SGD Two Hidden Layer - loss: 1.2091 - accuracy: 0.4332

    SGD Four Hidden Layer - loss: 1.1861 - accuracy: 0.4332

  1. Adam - loss: 0.8262 - accuracy: 0.6489

    Adam Two Hidden Layer - loss: 0.6423 - accuracy: 0.7404

    Adam Four Hidden Layer - loss: 0.8663 - accuracy: 0.6676

  1. RMSprop - loss: 0.8202 - accuracy: 0.6452

    RMSprop Two Hidden Layer - loss: 0.7830 - accuracy: 0.6872

    RMSprop Four Hidden Layer - loss: 0.8274 - accuracy: 0.6713

---------------------------------XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX--------------------------------------

PART THREE // GUI

In [1389]:
# BUILDING A GRAPHICAL USER INTERFACE

from tkinter import *
window = Tk()    


#click function
def click():
    entered_text=textbox.get()
    textbox1.delete(0.0,END)
    try:
        definition=my_compdict[entered_text]
    except:
        definition="Sorry"
    textbox.insert(END,definition)

#Exit_Button_Function
def close_window():
    top.destroy()
    exit()

    
window.title('Neural Network Project')  


#Label 1
Label (window, text = "Setp 1 : File Name : ", bg = 'gray',fg = 'white').grid(row = 1,column = 1,sticky = W)
#Textbox
textbox = Entry(window, width = 14, bg = "white")
textbox.grid(row = 1,column = 2,sticky = E)
#Button
Button(window, text = "Import Data", width = 10,command = click).grid(row = 1,column = 6,sticky = E)
#Textbox
textbox = Entry(window, width = 14, bg = "white")
textbox.grid(row = 1,column = 9,sticky = E)



#Label 2
Label (window, text = "Setp 2 : Target Column : ", bg = 'gray',fg = 'white').grid(row = 2,column = 1,sticky = W)
#Textbox
textbox = Entry(window, width = 14, bg = "white")
textbox.grid(row = 2,column = 2,sticky = E)
#Button
Button(window, text = "Import Target", width = 11,command = click).grid(row = 2,column = 6,sticky = E)
#Textbox
textbox = Entry(window, width = 14, bg = "white")
textbox.grid(row = 2,column = 9,sticky = E)



#Label 3
Label (window, text = "Setp 3 : Neural Network Regressor ", bg = 'gray',fg = 'white').grid(row = 3,column = 1,sticky = W)



#Label 4
Label (window, text = " Regression ", bg = 'gray',fg = 'white').grid(row = 4,column = 1,sticky = W)
#Button
Button(window, text = "Train", width = 6,command = click).grid(row = 4,column = 2,sticky = E)
#Textbox
textbox = Entry(window, width = 14, bg = "white")
textbox.grid(row = 4,column = 6,sticky = E)



#Label 5
Label (window, text = " Pickle ", bg = 'gray',fg = 'white').grid(row = 5,column = 1,sticky = W)
#Button
Button(window, text = "Run", width = 6,command = click).grid(row = 5,column = 2,sticky = E)
#Textbox
textbox = Entry(window, width = 14, bg = "white")
textbox.grid(row = 5,column = 6,sticky = E)



#Label 6
Label (window, text = "Setp 4 : Neural Network Classifier ", bg = 'gray',fg = 'white').grid(row = 6,column = 1,sticky = W)



#Label 7
Label (window, text = " Classifier ", bg = 'gray',fg = 'white').grid(row = 7,column = 1,sticky = W)
#Button
Button(window, text = "Train", width = 6,command = click).grid(row = 7,column = 2,sticky = E)
#Textbox
textbox = Entry(window, width = 14, bg = "white")
textbox.grid(row = 7,column = 6,sticky = E)



#Button
Button(window, text = "Run", width = 6,command = click).grid(row = 8,column = 2,sticky = E)
#Textbox
textbox = Entry(window, width = 14, bg = "white")
textbox.grid(row = 8,column = 6,sticky = E)


window.mainloop()

---------------------------------XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX--------------------------------------

PART FOUR // SVHM

1. Import Data.

In [314]:
# Importing the necessary file 

import h5py         # Importing the h5py library
from sklearn.preprocessing import OneHotEncoder        # Importing necessary library
from sklearn.metrics import confusion_matrix           # Importing confusion matrix
from skimage.color import rgb2gray               # Importing rgb color library

2. Data pre-processing and visualisation.

In [315]:
with h5py.File('SVHN_single.h5', 'r') as h5file:
    ls = list(h5file.keys())
    print('List of datasets in this file: \n', ls)
    X_train = h5file.get('X_train')
    X_test = h5file.get('X_test')
    X_val = h5file.get('X_val')
    y_train = h5file.get('y_train')
    y_test = h5file.get('y_test')
    y_val = h5file.get('y_val')
    X_train = np.array(X_train)
    X_test = np.array(X_test)
    X_val = np.array(X_val)
    y_train = np.array(y_train)
    y_test = np.array(y_test)
    y_val = np.array(y_val)
List of datasets in this file: 
 ['X_test', 'X_train', 'X_val', 'y_test', 'y_train', 'y_val']
In [316]:
# Analysing the shape of the data 

print('Training set :', X_train.shape, y_train.shape)
print('Test set :', X_test.shape, y_test.shape)
print('Validation Set :', X_val.shape, y_val.shape)
Training set : (42000, 32, 32) (42000,)
Test set : (18000, 32, 32) (18000,)
Validation Set : (60000, 32, 32) (60000,)
In [317]:
# Merging validation set into training set prior data splitting

X_train = np.concatenate((X_train, X_val[:60000]))
y_train = np.concatenate((y_train, y_val[:60000]))
In [318]:
del X_val, y_val
In [319]:
# Shape of data after merging

print('Training set :', X_train.shape, y_train.shape)
print('Test set :', X_test.shape, y_test.shape)
Training set : (102000, 32, 32) (102000,)
Test set : (18000, 32, 32) (18000,)
In [320]:
# Function to plot sample images

def plot_images(images, labels, num_row = 2, num_col = 5):
    
    plt.rcParams['axes.grid'] = False
    fig, axes = plt.subplots(num_row, num_col, figsize = (2*num_col, 2*num_row))
    
    for i in range(num_row * num_col):
        ax = axes[i//num_col, i%num_col]
        ax.imshow(images[i], cmap = 'gray')
        ax.set_title(labels[i], weight = 'bold', fontsize = 20)
    plt.tight_layout
        
In [321]:
plot_images(X_train, y_train)
In [322]:
plot_images(X_test, y_test)
In [323]:
# Function to plot distribution of data

def plot_distribution(y1, y2, title1, title2):

    plt.rcParams['axes.facecolor'] = '#E6E6E6'
    plt.rcParams['axes.grid'] = True
    plt.rcParams['axes.axisbelow'] = True
    plt.rcParams['grid.color'] = 'w'
    plt.rcParams['figure.figsize'] = (12, 4)

    fig, (ax1, ax2) = plt.subplots(1, 2, sharex=True)
    fig.suptitle('Class Distribution', fontsize=15, fontweight='bold', y=1.05)

    ax1.bar(np.arange(10),np.bincount(y1))
    ax1.set_title(title1)
    ax1.set_xlim(-0.5, 9.5)
    ax1.set_xticks(np.arange(10))
    ax2.bar(np.arange(10),np.bincount(y2),color='coral')
    ax2.set_title(title2)

    fig.tight_layout()
In [324]:
# Plotting class distribution of training set and test set

plot_distribution(y_train, y_test, "Training set", "Test set")
In [325]:
# Splitting train set into train and validation set

X_train, X_val, y_train, y_val = train_test_split(X_train, y_train, test_size=0.20, random_state=42)
In [326]:
# Plotting class distribution of training set and validation set

plot_distribution(y_train, y_val, "Training set", "Validation set")
In [327]:
# Calculate the mean and the std on the training dataset

train_mean = np.mean(X_train, axis=0)
train_std = np.std(X_train, axis=0)

Normalize images :

Normalization refers to normalizing the data dimensions so that they are of approximately the same scale. Divide each dimension by its standard deviation, once it has been zero-centered. alt text

In [328]:
X_train = (X_train - train_mean) / train_std
X_test = (X_test - train_mean)  / train_std
X_val = (X_val - train_mean) / train_std
In [329]:
plot_images(X_train, y_train)
In [330]:
# Fit the OneHotEncoder

enc = OneHotEncoder().fit(y_train.reshape(-1, 1))
In [331]:
# Transform the label values to a one-hot-encoding scheme (ready for CNN)

y_train = enc.transform(y_train.reshape(-1, 1)).toarray()
y_test = enc.transform(y_test.reshape(-1, 1)).toarray()
y_val = enc.transform(y_val.reshape(-1, 1)).toarray()
In [332]:
# Y shapes after OneHotEncoding

print("Training set", y_train.shape)
print("Validation set", y_val.shape)
print("Test set", y_test.shape)
Training set (81600, 10)
Validation set (20400, 10)
Test set (18000, 10)
In [333]:
# Reshape X from 3 dimensions to 4 dimensions (ready for CNN)

X_train = X_train.reshape(-1,32,32,1)
X_test = X_test.reshape(-1,32,32,1)
X_val = X_val.reshape(-1,32,32,1)

4. Building, Training & Testing the model

In order to get more robust results out of our model, we are going to augment the images in the dataset, by randomly rotating them, zooming them in and out, shifting them up and down (IMPORTANT NOTE: It is best that we do not shift them horizontally, since there are also distracting digits in the images), shifting their channels and shearing them.

In [334]:
datagen = ImageDataGenerator(rotation_range=8,
                             zoom_range=[0.95, 1.05],
                             height_shift_range=0.10,
                             shear_range=0.15)
In [335]:
# Define CNN model

keras.backend.clear_session()

model = keras.Sequential([
    keras.layers.Conv2D(32, (3, 3), padding='same', 
                           activation='relu',
                           input_shape=(32, 32, 1)),
    keras.layers.BatchNormalization(),
    keras.layers.Conv2D(32, (3, 3), padding='same', 
                        activation='relu'),
    keras.layers.MaxPooling2D((2, 2)),
    keras.layers.Dropout(0.3),
    

    keras.layers.Conv2D(64, (3, 3), padding='same', 
                           activation='relu'),
    keras.layers.BatchNormalization(),
    keras.layers.Conv2D(64, (3, 3), padding='same',
                        activation='relu'),
    keras.layers.MaxPooling2D((2, 2)),
    keras.layers.Dropout(0.3),
    

    keras.layers.Conv2D(128, (3, 3), padding='same', 
                           activation='relu'),
    keras.layers.BatchNormalization(),
    keras.layers.Conv2D(128, (3, 3), padding='same',
                        activation='relu'),
    keras.layers.MaxPooling2D((2, 2)),
    keras.layers.Dropout(0.3),
    
    
    keras.layers.Flatten(),
    keras.layers.Dense(128, activation='relu'),
    keras.layers.Dropout(0.4),    
    keras.layers.Dense(10,  activation='softmax')
])

early_stopping = tensorflow.keras.callbacks.EarlyStopping(patience=8)
optimizer = tensorflow.keras.optimizers.Adam(amsgrad=True)
model_checkpoint = tensorflow.keras.callbacks.ModelCheckpoint('best_cnn.h5', 
                   save_best_only=True)
model.compile(optimizer=optimizer,
              loss='categorical_crossentropy',
              metrics=['accuracy'])

In order to determine a good learning rate for the optimizer of our model (here, we use the AMSGrad variant of the Adam optimizer), we set a callback in an auxillary model which will gradually increase the learning rate of the optimizer.

In [336]:
model.summary()
Model: "sequential"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
conv2d (Conv2D)              (None, 32, 32, 32)        320       
_________________________________________________________________
batch_normalization (BatchNo (None, 32, 32, 32)        128       
_________________________________________________________________
conv2d_1 (Conv2D)            (None, 32, 32, 32)        9248      
_________________________________________________________________
max_pooling2d (MaxPooling2D) (None, 16, 16, 32)        0         
_________________________________________________________________
dropout (Dropout)            (None, 16, 16, 32)        0         
_________________________________________________________________
conv2d_2 (Conv2D)            (None, 16, 16, 64)        18496     
_________________________________________________________________
batch_normalization_1 (Batch (None, 16, 16, 64)        256       
_________________________________________________________________
conv2d_3 (Conv2D)            (None, 16, 16, 64)        36928     
_________________________________________________________________
max_pooling2d_1 (MaxPooling2 (None, 8, 8, 64)          0         
_________________________________________________________________
dropout_1 (Dropout)          (None, 8, 8, 64)          0         
_________________________________________________________________
conv2d_4 (Conv2D)            (None, 8, 8, 128)         73856     
_________________________________________________________________
batch_normalization_2 (Batch (None, 8, 8, 128)         512       
_________________________________________________________________
conv2d_5 (Conv2D)            (None, 8, 8, 128)         147584    
_________________________________________________________________
max_pooling2d_2 (MaxPooling2 (None, 4, 4, 128)         0         
_________________________________________________________________
dropout_2 (Dropout)          (None, 4, 4, 128)         0         
_________________________________________________________________
flatten (Flatten)            (None, 2048)              0         
_________________________________________________________________
dense (Dense)                (None, 128)               262272    
_________________________________________________________________
dropout_3 (Dropout)          (None, 128)               0         
_________________________________________________________________
dense_1 (Dense)              (None, 10)                1290      
=================================================================
Total params: 550,890
Trainable params: 550,442
Non-trainable params: 448
_________________________________________________________________
In [337]:
# Fit model in order to determine best learning rate

history = model.fit_generator(datagen.flow(X_train, y_train, batch_size = 256),
                              epochs = 50, validation_data = (X_val, y_val),
                              callbacks = [early_stopping, model_checkpoint])
/Applications/anaconda3/lib/python3.7/site-packages/tensorflow/python/keras/engine/training.py:1844: UserWarning: `Model.fit_generator` is deprecated and will be removed in a future version. Please use `Model.fit`, which supports generators.
  warnings.warn('`Model.fit_generator` is deprecated and '
Epoch 1/50
319/319 [==============================] - 672s 2s/step - loss: 2.3993 - accuracy: 0.1001 - val_loss: 2.3027 - val_accuracy: 0.0991
Epoch 2/50
319/319 [==============================] - 638s 2s/step - loss: 2.2995 - accuracy: 0.1023 - val_loss: 1.9562 - val_accuracy: 0.2566
Epoch 3/50
319/319 [==============================] - 620s 2s/step - loss: 1.9514 - accuracy: 0.2595 - val_loss: 0.7016 - val_accuracy: 0.7831
Epoch 4/50
319/319 [==============================] - 618s 2s/step - loss: 1.1359 - accuracy: 0.5815 - val_loss: 0.3251 - val_accuracy: 0.9097
Epoch 5/50
319/319 [==============================] - 629s 2s/step - loss: 0.4692 - accuracy: 0.8615 - val_loss: 0.2477 - val_accuracy: 0.9380
Epoch 6/50
319/319 [==============================] - 619s 2s/step - loss: 0.3387 - accuracy: 0.9031 - val_loss: 0.2238 - val_accuracy: 0.9409
Epoch 7/50
319/319 [==============================] - 618s 2s/step - loss: 0.2914 - accuracy: 0.9178 - val_loss: 0.2036 - val_accuracy: 0.9475
Epoch 8/50
319/319 [==============================] - 621s 2s/step - loss: 0.2667 - accuracy: 0.9248 - val_loss: 0.1883 - val_accuracy: 0.9507
Epoch 9/50
319/319 [==============================] - 678s 2s/step - loss: 0.2455 - accuracy: 0.9290 - val_loss: 0.1784 - val_accuracy: 0.9528
Epoch 10/50
319/319 [==============================] - 677s 2s/step - loss: 0.2239 - accuracy: 0.9362 - val_loss: 0.1668 - val_accuracy: 0.9582
Epoch 11/50
319/319 [==============================] - 612s 2s/step - loss: 0.2126 - accuracy: 0.9387 - val_loss: 0.1652 - val_accuracy: 0.9591
Epoch 12/50
319/319 [==============================] - 611s 2s/step - loss: 0.2001 - accuracy: 0.9432 - val_loss: 0.1555 - val_accuracy: 0.9609
Epoch 13/50
319/319 [==============================] - 648s 2s/step - loss: 0.1911 - accuracy: 0.9447 - val_loss: 0.1546 - val_accuracy: 0.9616
Epoch 14/50
319/319 [==============================] - 618s 2s/step - loss: 0.1877 - accuracy: 0.9449 - val_loss: 0.1592 - val_accuracy: 0.9611
Epoch 15/50
319/319 [==============================] - 620s 2s/step - loss: 0.1804 - accuracy: 0.9494 - val_loss: 0.1428 - val_accuracy: 0.9639
Epoch 16/50
319/319 [==============================] - 686s 2s/step - loss: 0.1717 - accuracy: 0.9502 - val_loss: 0.1380 - val_accuracy: 0.9653
Epoch 17/50
319/319 [==============================] - 658s 2s/step - loss: 0.1692 - accuracy: 0.9516 - val_loss: 0.1424 - val_accuracy: 0.9641
Epoch 18/50
319/319 [==============================] - 618s 2s/step - loss: 0.1596 - accuracy: 0.9535 - val_loss: 0.1355 - val_accuracy: 0.9666
Epoch 19/50
319/319 [==============================] - 675s 2s/step - loss: 0.1564 - accuracy: 0.9549 - val_loss: 0.1329 - val_accuracy: 0.9670
Epoch 20/50
319/319 [==============================] - 667s 2s/step - loss: 0.1477 - accuracy: 0.9567 - val_loss: 0.1319 - val_accuracy: 0.9670
Epoch 21/50
319/319 [==============================] - 675s 2s/step - loss: 0.1474 - accuracy: 0.9572 - val_loss: 0.1356 - val_accuracy: 0.9694
Epoch 22/50
319/319 [==============================] - 685s 2s/step - loss: 0.1472 - accuracy: 0.9572 - val_loss: 0.1308 - val_accuracy: 0.9692
Epoch 23/50
319/319 [==============================] - 637s 2s/step - loss: 0.1388 - accuracy: 0.9591 - val_loss: 0.1282 - val_accuracy: 0.9683
Epoch 24/50
319/319 [==============================] - 662s 2s/step - loss: 0.1357 - accuracy: 0.9591 - val_loss: 0.1216 - val_accuracy: 0.9702
Epoch 25/50
319/319 [==============================] - 672s 2s/step - loss: 0.1324 - accuracy: 0.9609 - val_loss: 0.1170 - val_accuracy: 0.9709
Epoch 26/50
319/319 [==============================] - 673s 2s/step - loss: 0.1273 - accuracy: 0.9625 - val_loss: 0.1184 - val_accuracy: 0.9725
Epoch 27/50
319/319 [==============================] - 617s 2s/step - loss: 0.1277 - accuracy: 0.9624 - val_loss: 0.1167 - val_accuracy: 0.9725
Epoch 28/50
319/319 [==============================] - 616s 2s/step - loss: 0.1217 - accuracy: 0.9633 - val_loss: 0.1125 - val_accuracy: 0.9735
Epoch 29/50
319/319 [==============================] - 611s 2s/step - loss: 0.1287 - accuracy: 0.9615 - val_loss: 0.1117 - val_accuracy: 0.9715
Epoch 30/50
319/319 [==============================] - 634s 2s/step - loss: 0.1187 - accuracy: 0.9645 - val_loss: 0.1176 - val_accuracy: 0.9722
Epoch 31/50
319/319 [==============================] - 610s 2s/step - loss: 0.1195 - accuracy: 0.9655 - val_loss: 0.1065 - val_accuracy: 0.9739
Epoch 32/50
319/319 [==============================] - 611s 2s/step - loss: 0.1095 - accuracy: 0.9674 - val_loss: 0.1078 - val_accuracy: 0.9744
Epoch 33/50
319/319 [==============================] - 608s 2s/step - loss: 0.1148 - accuracy: 0.9660 - val_loss: 0.1129 - val_accuracy: 0.9734
Epoch 34/50
319/319 [==============================] - 608s 2s/step - loss: 0.1072 - accuracy: 0.9675 - val_loss: 0.1094 - val_accuracy: 0.9732
Epoch 35/50
319/319 [==============================] - 610s 2s/step - loss: 0.1075 - accuracy: 0.9672 - val_loss: 0.1097 - val_accuracy: 0.9750
Epoch 36/50
319/319 [==============================] - 606s 2s/step - loss: 0.1103 - accuracy: 0.9665 - val_loss: 0.1071 - val_accuracy: 0.9749
Epoch 37/50
319/319 [==============================] - 607s 2s/step - loss: 0.1069 - accuracy: 0.9669 - val_loss: 0.1036 - val_accuracy: 0.9762
Epoch 38/50
319/319 [==============================] - 605s 2s/step - loss: 0.1030 - accuracy: 0.9689 - val_loss: 0.1045 - val_accuracy: 0.9748
Epoch 39/50
319/319 [==============================] - 605s 2s/step - loss: 0.1020 - accuracy: 0.9690 - val_loss: 0.1077 - val_accuracy: 0.9750
Epoch 40/50
319/319 [==============================] - 607s 2s/step - loss: 0.1013 - accuracy: 0.9688 - val_loss: 0.0971 - val_accuracy: 0.9764
Epoch 41/50
319/319 [==============================] - 605s 2s/step - loss: 0.0982 - accuracy: 0.9701 - val_loss: 0.1018 - val_accuracy: 0.9765
Epoch 42/50
319/319 [==============================] - 608s 2s/step - loss: 0.0963 - accuracy: 0.9707 - val_loss: 0.0985 - val_accuracy: 0.9769
Epoch 43/50
319/319 [==============================] - 701s 2s/step - loss: 0.0934 - accuracy: 0.9714 - val_loss: 0.0984 - val_accuracy: 0.9773
Epoch 44/50
319/319 [==============================] - 621s 2s/step - loss: 0.0966 - accuracy: 0.9705 - val_loss: 0.0968 - val_accuracy: 0.9786
Epoch 45/50
319/319 [==============================] - 642s 2s/step - loss: 0.0916 - accuracy: 0.9720 - val_loss: 0.0919 - val_accuracy: 0.9773
Epoch 46/50
319/319 [==============================] - 629s 2s/step - loss: 0.0884 - accuracy: 0.9724 - val_loss: 0.0951 - val_accuracy: 0.9778
Epoch 47/50
319/319 [==============================] - 640s 2s/step - loss: 0.0898 - accuracy: 0.9726 - val_loss: 0.0906 - val_accuracy: 0.9782
Epoch 48/50
319/319 [==============================] - 620s 2s/step - loss: 0.0908 - accuracy: 0.9722 - val_loss: 0.0963 - val_accuracy: 0.9782
Epoch 49/50
319/319 [==============================] - 612s 2s/step - loss: 0.0877 - accuracy: 0.9726 - val_loss: 0.0948 - val_accuracy: 0.9793
Epoch 50/50
319/319 [==============================] - 613s 2s/step - loss: 0.0882 - accuracy: 0.9726 - val_loss: 0.0920 - val_accuracy: 0.9783

5. Visualizations and insights

In [338]:
# Evaluate train and validation accuracies and losses

train_acc = history.history['accuracy']
val_acc = history.history['val_accuracy']

train_loss = history.history['loss']
val_loss = history.history['val_loss']
In [339]:
# Visualize epochs vs. train and validation accuracies and losses

plt.figure(figsize=(20, 10))

plt.subplot(1, 2, 1)
plt.plot(train_acc, label = 'Training Accuracy')
plt.plot(val_acc, label = 'Validation Accuracy')
plt.legend()
plt.title('Epochs vs. Training and Validation Accuracy')
    
plt.subplot(1, 2, 2)
plt.plot(train_loss, label = 'Training Loss')
plt.plot(val_loss, label = 'Validation Loss')
plt.legend()
plt.title('Epochs vs. Training and Validation Loss')

plt.show()
In [340]:
# Evaluate model on test set

test_loss, test_acc = model.evaluate(x = X_test, y = y_test, verbose=0)

print('Test accuracy is: {:0.4f} \nTest loss is: {:0.4f}'.
      format(test_acc, test_loss))
Test accuracy is: 0.9845 
Test loss is: 0.0684
In [341]:
# Get predictions and apply inverse transformation to the labels

y_pred = model.predict(X_train)

y_pred = enc.inverse_transform(y_pred)
y_train = enc.inverse_transform(y_train)
In [342]:
# Plot the confusion matrix for training set

plt.figure(dpi=300)
cm = confusion_matrix(y_train, y_pred)
plt.title('Confusion matrix for training set', weight='bold')
sns.heatmap(cm,annot=True,fmt='g',cmap='coolwarm',annot_kws={"size": 12})
plt.xlabel('Predicted')
plt.ylabel('Actual')
plt.show()